diff --git a/common/common_volcengine_version.go b/common/common_volcengine_version.go index b75ca6ae..3ea26f6a 100644 --- a/common/common_volcengine_version.go +++ b/common/common_volcengine_version.go @@ -2,5 +2,5 @@ package common const ( TerraformProviderName = "terraform-provider-volcengine" - TerraformProviderVersion = "0.0.146" + TerraformProviderVersion = "0.0.147" ) diff --git a/docgen/main.go b/docgen/main.go index bf1b7c0f..fad0a320 100644 --- a/docgen/main.go +++ b/docgen/main.go @@ -159,6 +159,7 @@ var resourceKeys = map[string]string{ "cdn": "CDN", "financial_relation": "FINANCIAL_RELATION", "cloud_identity": "CLOUD_IDENTITY", + "kafka": "KAFKA", } type Products struct { diff --git a/example/dataKafkaConsumedPartitions/main.tf b/example/dataKafkaConsumedPartitions/main.tf new file mode 100644 index 00000000..4071aff9 --- /dev/null +++ b/example/dataKafkaConsumedPartitions/main.tf @@ -0,0 +1,82 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_group" "foo" { + instance_id = volcengine_kafka_instance.foo.id + group_id = "acc-test-group" + description = "tf-test" +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" 
+ description = "tf-test" + all_authority = true + password_type = "Scram" +} + +resource "volcengine_kafka_topic" "foo" { + topic_name = "acc-test-topic" + instance_id = volcengine_kafka_instance.foo.id + description = "tf-test" + partition_number = 15 + replica_number = 3 + + parameters { + min_insync_replica_number = 2 + message_max_byte = 10 + log_retention_hours = 96 + } + + all_authority = false + access_policies { + user_name = volcengine_kafka_sasl_user.foo.user_name + access_policy = "Pub" + } +} + +data "volcengine_kafka_consumed_partitions" "default" { + instance_id = volcengine_kafka_instance.foo.id + group_id = volcengine_kafka_group.foo.group_id + topic_name = volcengine_kafka_topic.foo.topic_name +} diff --git a/example/dataKafkaConsumedTopics/main.tf b/example/dataKafkaConsumedTopics/main.tf new file mode 100644 index 00000000..b35c2683 --- /dev/null +++ b/example/dataKafkaConsumedTopics/main.tf @@ -0,0 +1,82 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_group" "foo" { + instance_id = volcengine_kafka_instance.foo.id + group_id = "acc-test-group" + description = "tf-test" +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" 
+ description = "tf-test" + all_authority = true + password_type = "Scram" +} + +resource "volcengine_kafka_topic" "foo" { + topic_name = "acc-test-topic" + instance_id = volcengine_kafka_instance.foo.id + description = "tf-test" + partition_number = 15 + replica_number = 3 + + parameters { + min_insync_replica_number = 2 + message_max_byte = 10 + log_retention_hours = 96 + } + + all_authority = false + access_policies { + user_name = volcengine_kafka_sasl_user.foo.user_name + access_policy = "Pub" + } +} + +data "volcengine_kafka_consumed_topics" "default" { + instance_id = volcengine_kafka_instance.foo.id + group_id = volcengine_kafka_group.foo.group_id + topic_name = volcengine_kafka_topic.foo.topic_name +} diff --git a/example/dataKafkaGroups/main.tf b/example/dataKafkaGroups/main.tf new file mode 100644 index 00000000..63370343 --- /dev/null +++ b/example/dataKafkaGroups/main.tf @@ -0,0 +1,51 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_group" "foo" { + instance_id = volcengine_kafka_instance.foo.id + group_id = "acc-test-group" + description = "tf-test" +} + +data "volcengine_kafka_groups" "default" { + instance_id = volcengine_kafka_group.foo.instance_id +} diff --git a/example/dataKafkaInstances/main.tf b/example/dataKafkaInstances/main.tf new file mode 100644 index 00000000..f53e9bbb --- /dev/null +++ b/example/dataKafkaInstances/main.tf @@ -0,0 +1,45 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +data "volcengine_kafka_instances" "default" { + instance_id = volcengine_kafka_instance.foo.id +} diff --git a/example/dataKafkaRegions/main.tf b/example/dataKafkaRegions/main.tf new file mode 100644 index 00000000..ae307d35 --- /dev/null +++ b/example/dataKafkaRegions/main.tf @@ -0,0 +1,2 @@ +data "volcengine_kafka_regions" "default" { +} \ No newline at end of 
file diff --git a/example/dataKafkaSaslUsers/main.tf b/example/dataKafkaSaslUsers/main.tf new file mode 100644 index 00000000..dc6ae34e --- /dev/null +++ b/example/dataKafkaSaslUsers/main.tf @@ -0,0 +1,55 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" + description = "tf-test" + all_authority = true + password_type = "Scram" +} + +data "volcengine_kafka_sasl_users" "default" { + instance_id = volcengine_kafka_instance.foo.id + user_name = volcengine_kafka_sasl_user.foo.user_name +} diff --git a/example/dataKafkaTopicPartitions/main.tf b/example/dataKafkaTopicPartitions/main.tf new file mode 100644 index 00000000..0df59e7d --- /dev/null +++ b/example/dataKafkaTopicPartitions/main.tf @@ -0,0 +1,76 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" 
+ description = "tf-test" + all_authority = true + password_type = "Scram" +} + +resource "volcengine_kafka_topic" "foo" { + topic_name = "acc-test-topic" + instance_id = volcengine_kafka_instance.foo.id + description = "tf-test" + partition_number = 15 + replica_number = 3 + + parameters { + min_insync_replica_number = 2 + message_max_byte = 10 + log_retention_hours = 96 + } + + all_authority = false + access_policies { + user_name = volcengine_kafka_sasl_user.foo.user_name + access_policy = "Pub" + } +} + +data "volcengine_kafka_topic_partitions" "default" { + instance_id = volcengine_kafka_instance.foo.id + topic_name = volcengine_kafka_topic.foo.topic_name + partition_ids = [1, 2] +} diff --git a/example/dataKafkaTopics/main.tf b/example/dataKafkaTopics/main.tf new file mode 100644 index 00000000..f02bf394 --- /dev/null +++ b/example/dataKafkaTopics/main.tf @@ -0,0 +1,74 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" 
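+  # this sasl user is granted "Pub" access on the topic below via access_policies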
+ description = "tf-test" + all_authority = true + password_type = "Scram" +} + +resource "volcengine_kafka_topic" "foo" { + topic_name = "acc-test-topic" + instance_id = volcengine_kafka_instance.foo.id + description = "tf-test" + partition_number = 15 + replica_number = 3 + + parameters { + min_insync_replica_number = 2 + message_max_byte = 10 + log_retention_hours = 96 + } + + all_authority = false + access_policies { + user_name = volcengine_kafka_sasl_user.foo.user_name + access_policy = "Pub" + } +} + +data "volcengine_kafka_topics" "default" { + instance_id = volcengine_kafka_topic.foo.instance_id +} diff --git a/example/dataKafkaZones/main.tf b/example/dataKafkaZones/main.tf new file mode 100644 index 00000000..9fd9aba8 --- /dev/null +++ b/example/dataKafkaZones/main.tf @@ -0,0 +1,3 @@ +data "volcengine_kafka_zones" "default" { + region_id = "cn-beijing" +} \ No newline at end of file diff --git a/example/kafkaGroup/main.tf b/example/kafkaGroup/main.tf new file mode 100644 index 00000000..fd68777c --- /dev/null +++ b/example/kafkaGroup/main.tf @@ -0,0 +1,47 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_group" "foo" { + instance_id = volcengine_kafka_instance.foo.id + group_id = "acc-test-group" + description = "tf-test" +} diff --git a/example/kafkaInstance/main.tf b/example/kafkaInstance/main.tf new file mode 100644 index 00000000..3728951a --- /dev/null +++ b/example/kafkaInstance/main.tf @@ -0,0 +1,53 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } + parameters { + parameter_name = "MessageTimestampType" + parameter_value = "CreateTime" + } + parameters { + parameter_name = "OffsetRetentionMinutes" + parameter_value = "10080" + } + parameters { + parameter_name = "AutoDeleteGroup" + parameter_value = "false" + } +} diff --git a/example/kafkaPublicAddress/main.tf 
b/example/kafkaPublicAddress/main.tf new file mode 100644 index 00000000..fce12d84 --- /dev/null +++ b/example/kafkaPublicAddress/main.tf @@ -0,0 +1,55 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_eip_address" "foo" { + billing_type = "PostPaidByBandwidth" + bandwidth = 1 + isp = "BGP" + name = "acc-test-eip" + description = "tf-test" + project_name = "default" +} + +resource "volcengine_kafka_public_address" "foo" { + instance_id = volcengine_kafka_instance.foo.id + eip_id = volcengine_eip_address.foo.id +} diff --git a/example/kafkaSaslUser/main.tf b/example/kafkaSaslUser/main.tf new file mode 100644 index 00000000..3fc495d0 --- /dev/null +++ b/example/kafkaSaslUser/main.tf @@ -0,0 +1,50 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" 
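+  # this user can be queried back with the volcengine_kafka_sasl_users data source (see example/dataKafkaSaslUsers)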
+ description = "tf-test" + all_authority = true + password_type = "Scram" +} diff --git a/example/kafkaTopic/main.tf b/example/kafkaTopic/main.tf new file mode 100644 index 00000000..7de5aedc --- /dev/null +++ b/example/kafkaTopic/main.tf @@ -0,0 +1,70 @@ +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" + description = "tf-test" + all_authority = true + password_type = "Scram" +} + +resource "volcengine_kafka_topic" "foo" { + topic_name = "acc-test-topic" + instance_id = volcengine_kafka_instance.foo.id + description = "tf-test" + partition_number = 15 + replica_number = 3 + + parameters { + min_insync_replica_number = 2 + message_max_byte = 10 + log_retention_hours = 96 + } + + all_authority = false + access_policies { + user_name = volcengine_kafka_sasl_user.foo.user_name + access_policy = "Pub" + } +} diff --git a/example/tlsIndex/main.tf b/example/tlsIndex/main.tf index e64bb47c..b6a0fe9a 100644 --- a/example/tlsIndex/main.tf +++ b/example/tlsIndex/main.tf @@ -1,5 +1,5 @@ resource "volcengine_tls_index" "foo" { - topic_id = "7ce12237-6670-44a7-9d79-2e36961586e6" + topic_id = "227a8d0c-b85b-48df-bee1-0927a595****" # full_text { # case_sensitive = true @@ -13,7 +13,8 @@ resource "volcengine_tls_index" "foo" { case_sensitive = true delimiter = "!" 
include_chinese = false - sql_flag = false + sql_flag = true + index_all = true json_keys { key = "class" value_type = "text" diff --git a/volcengine/kafka/kafka_consumed_partition/data_source_volcengine_kafka_consumed_partitions.go b/volcengine/kafka/kafka_consumed_partition/data_source_volcengine_kafka_consumed_partitions.go new file mode 100644 index 00000000..6ba71d3d --- /dev/null +++ b/volcengine/kafka/kafka_consumed_partition/data_source_volcengine_kafka_consumed_partitions.go @@ -0,0 +1,83 @@ +package kafka_consumed_partition + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +func DataSourceVolcengineKafkaConsumedPartitions() *schema.Resource { + return &schema.Resource{ + Read: dataSourceVolcengineKafkaConsumedPartitionsRead, + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Required: true, + Description: "The id of kafka instance.", + }, + "group_id": { + Type: schema.TypeString, + Required: true, + Description: "The id of kafka group.", + }, + "topic_name": { + Type: schema.TypeString, + Required: true, + Description: "The name of kafka topic.", + }, + "output_file": { + Type: schema.TypeString, + Optional: true, + Description: "File name where to save data source results.", + }, + "total_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The total count of query.", + }, + "consumed_partitions": { + Description: "The collection of query.", + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "partition_id": { + Type: schema.TypeInt, + Computed: true, + Description: "The index number of partition.", + }, + "accumulation": { + Type: schema.TypeInt, + Computed: true, + Description: "The total amount of message accumulation in this topic partition for the consumer group.", + }, + "consumed_client": { + Type: schema.TypeString, + Computed: true, + Description: "The consumed client info of partition.", + }, + "consumed_offset": { + Type: schema.TypeInt, + Computed: true, + Description: "The consumed offset of partition.", + }, + "start_offset": { + Type: schema.TypeInt, + Computed: true, + Description: "The start offset of partition.", + }, + "end_offset": { + Type: schema.TypeInt, + Computed: true, + Description: "The end offset of partition.", + }, + }, + }, + }, + }, + } +} + +func dataSourceVolcengineKafkaConsumedPartitionsRead(d *schema.ResourceData, meta interface{}) error { + service := NewKafkaConsumedPartitionService(meta.(*ve.SdkClient)) + return ve.DefaultDispatcher().Data(service, d, DataSourceVolcengineKafkaConsumedPartitions()) +} diff --git a/volcengine/kafka/kafka_consumed_partition/service_volcengine_kafka_consumed_tpartition.go b/volcengine/kafka/kafka_consumed_partition/service_volcengine_kafka_consumed_tpartition.go new file mode 100644 index 00000000..486de3d7 --- /dev/null +++ b/volcengine/kafka/kafka_consumed_partition/service_volcengine_kafka_consumed_tpartition.go @@ -0,0 +1,112 @@ +package kafka_consumed_partition + +import ( + "encoding/json" + "errors" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" + "github.com/volcengine/terraform-provider-volcengine/logger" +) + +type VolcengineKafkaConsumedPartitionService struct { + Client *ve.SdkClient +} + +func NewKafkaConsumedPartitionService(c *ve.SdkClient) 
*VolcengineKafkaConsumedPartitionService { + return &VolcengineKafkaConsumedPartitionService{ + Client: c, + } +} + +func (s *VolcengineKafkaConsumedPartitionService) GetClient() *ve.SdkClient { + return s.Client +} + +func (s *VolcengineKafkaConsumedPartitionService) ReadResources(m map[string]interface{}) (data []interface{}, err error) { + var ( + resp *map[string]interface{} + results interface{} + ok bool + ) + return ve.WithPageNumberQuery(m, "PageSize", "PageNumber", 100, 1, func(condition map[string]interface{}) ([]interface{}, error) { + action := "DescribeConsumedPartitions" + + bytes, _ := json.Marshal(condition) + logger.Debug(logger.ReqFormat, action, string(bytes)) + if condition == nil { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), nil) + if err != nil { + return data, err + } + } else { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &condition) + if err != nil { + return data, err + } + } + respBytes, _ := json.Marshal(resp) + logger.Debug(logger.RespFormat, action, condition, string(respBytes)) + results, err = ve.ObtainSdkValue("Result.ConsumedPartitionsInfo", *resp) + if err != nil { + return data, err + } + if results == nil { + results = []interface{}{} + } + if data, ok = results.([]interface{}); !ok { + return data, errors.New("Result.ConsumedPartitionsInfo is not Slice") + } + return data, err + }) +} + +func (s *VolcengineKafkaConsumedPartitionService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) { + return data, nil +} + +func (s *VolcengineKafkaConsumedPartitionService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf { + return nil +} + +func (VolcengineKafkaConsumedPartitionService) WithResourceResponseHandlers(d map[string]interface{}) []ve.ResourceResponseHandler { + handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) { + return d, nil, nil + } + return []ve.ResourceResponseHandler{handler} +} + +func (s *VolcengineKafkaConsumedPartitionService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineKafkaConsumedPartitionService) ModifyResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineKafkaConsumedPartitionService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineKafkaConsumedPartitionService) DatasourceResources(*schema.ResourceData, *schema.Resource) ve.DataSourceInfo { + return ve.DataSourceInfo{ + CollectField: "consumed_partitions", + ContentType: ve.ContentTypeJson, + } +} + +func (s *VolcengineKafkaConsumedPartitionService) ReadResourceId(id string) string { + return id +} + +func getUniversalInfo(actionName string) ve.UniversalInfo { + return ve.UniversalInfo{ + ServiceName: "kafka", + Version: "2022-05-01", + HttpMethod: ve.POST, + ContentType: ve.ApplicationJSON, + Action: actionName, + } +} diff --git a/volcengine/kafka/kafka_consumed_topic/data_source_volcengine_kafka_consumed_topics.go b/volcengine/kafka/kafka_consumed_topic/data_source_volcengine_kafka_consumed_topics.go new file mode 100644 index 00000000..9a1eec03 --- /dev/null +++ b/volcengine/kafka/kafka_consumed_topic/data_source_volcengine_kafka_consumed_topics.go @@ -0,0 +1,63 @@ +package kafka_consumed_topic + 
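+// This read-only data source lists the topics consumed by a kafka group, with the message accumulation of each topic, backed by the DescribeConsumedTopics action implemented in the service file below.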
+import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +func DataSourceVolcengineKafkaConsumedTopics() *schema.Resource { + return &schema.Resource{ + Read: dataSourceVolcengineKafkaConsumedTopicsRead, + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Required: true, + Description: "The id of kafka instance.", + }, + "group_id": { + Type: schema.TypeString, + Required: true, + Description: "The id of kafka group.", + }, + "topic_name": { + Type: schema.TypeString, + Optional: true, + Description: "The name of kafka topic. This field supports fuzzy query.", + }, + "output_file": { + Type: schema.TypeString, + Optional: true, + Description: "File name where to save data source results.", + }, + "total_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The total count of query.", + }, + "consumed_topics": { + Description: "The collection of query.", + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "topic_name": { + Type: schema.TypeString, + Computed: true, + Description: "The name of kafka topic.", + }, + "accumulation": { + Type: schema.TypeInt, + Computed: true, + Description: "The total amount of message accumulation in this topic for the consumer group.", + }, + }, + }, + }, + }, + } +} + +func dataSourceVolcengineKafkaConsumedTopicsRead(d *schema.ResourceData, meta interface{}) error { + service := NewKafkaConsumedTopicService(meta.(*ve.SdkClient)) + return ve.DefaultDispatcher().Data(service, d, DataSourceVolcengineKafkaConsumedTopics()) +} diff --git a/volcengine/kafka/kafka_consumed_topic/service_volcengine_kafka_consumed_topic.go b/volcengine/kafka/kafka_consumed_topic/service_volcengine_kafka_consumed_topic.go new file mode 100644 index 00000000..5e465fe8 --- /dev/null +++ b/volcengine/kafka/kafka_consumed_topic/service_volcengine_kafka_consumed_topic.go @@ -0,0 +1,112 @@ +package kafka_consumed_topic + +import ( + "encoding/json" + "errors" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" + "github.com/volcengine/terraform-provider-volcengine/logger" +) + +type VolcengineKafkaConsumedTopicService struct { + Client *ve.SdkClient +} + +func NewKafkaConsumedTopicService(c *ve.SdkClient) *VolcengineKafkaConsumedTopicService { + return &VolcengineKafkaConsumedTopicService{ + Client: c, + } +} + +func (s *VolcengineKafkaConsumedTopicService) GetClient() *ve.SdkClient { + return s.Client +} + +func (s *VolcengineKafkaConsumedTopicService) ReadResources(m map[string]interface{}) (data []interface{}, err error) { + var ( + resp *map[string]interface{} + results interface{} + ok bool + ) + return ve.WithPageNumberQuery(m, "PageSize", "PageNumber", 100, 1, func(condition map[string]interface{}) ([]interface{}, error) { + action := "DescribeConsumedTopics" + + bytes, _ := json.Marshal(condition) + logger.Debug(logger.ReqFormat, action, string(bytes)) + if condition == nil { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), nil) + if err != nil { + return data, err + } + } else { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &condition) + if err != nil { + return data, err + } + } + respBytes, _ := json.Marshal(resp) + logger.Debug(logger.RespFormat, action, condition, string(respBytes)) + results, err = 
ve.ObtainSdkValue("Result.ConsumedTopicsInfo", *resp) + if err != nil { + return data, err + } + if results == nil { + results = []interface{}{} + } + if data, ok = results.([]interface{}); !ok { + return data, errors.New("Result.ConsumedTopicsInfo is not Slice") + } + return data, err + }) +} + +func (s *VolcengineKafkaConsumedTopicService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) { + return data, nil +} + +func (s *VolcengineKafkaConsumedTopicService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf { + return nil +} + +func (VolcengineKafkaConsumedTopicService) WithResourceResponseHandlers(d map[string]interface{}) []ve.ResourceResponseHandler { + handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) { + return d, nil, nil + } + return []ve.ResourceResponseHandler{handler} +} + +func (s *VolcengineKafkaConsumedTopicService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineKafkaConsumedTopicService) ModifyResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineKafkaConsumedTopicService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineKafkaConsumedTopicService) DatasourceResources(*schema.ResourceData, *schema.Resource) ve.DataSourceInfo { + return ve.DataSourceInfo{ + CollectField: "consumed_topics", + ContentType: ve.ContentTypeJson, + } +} + +func (s *VolcengineKafkaConsumedTopicService) ReadResourceId(id string) string { + return id +} + +func getUniversalInfo(actionName string) ve.UniversalInfo { + return ve.UniversalInfo{ + ServiceName: "kafka", + Version: "2022-05-01", + HttpMethod: ve.POST, + ContentType: ve.ApplicationJSON, + Action: actionName, + } +} diff --git a/volcengine/kafka/kafka_group/data_source_volcengine_kafka_groups.go b/volcengine/kafka/kafka_group/data_source_volcengine_kafka_groups.go new file mode 100644 index 00000000..ea0e2741 --- /dev/null +++ b/volcengine/kafka/kafka_group/data_source_volcengine_kafka_groups.go @@ -0,0 +1,65 @@ +package kafka_group + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +func DataSourceVolcengineKafkaGroups() *schema.Resource { + return &schema.Resource{ + Read: dataSourceVolcengineKafkaGroupsRead, + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Required: true, + Description: "The instance id of kafka group.", + }, + "group_id": { + Type: schema.TypeString, + Optional: true, + Description: "The id of kafka group, supports fuzzy matching.", + }, + "name_regex": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringIsValidRegExp, + Description: "A Name Regex of kafka group.", + }, + "output_file": { + Type: schema.TypeString, + Optional: true, + Description: "File name where to save data source results.", + }, + "total_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The total count of query.", + }, + "groups": { + Description: "The collection of query.", + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "group_id": { 
+ Type: schema.TypeString, + Computed: true, + Description: "The id of kafka group.", + }, + "state": { + Type: schema.TypeString, + Computed: true, + Description: "The state of kafka group.", + }, + }, + }, + }, + }, + } +} + +func dataSourceVolcengineKafkaGroupsRead(d *schema.ResourceData, meta interface{}) error { + service := NewKafkaGroupService(meta.(*ve.SdkClient)) + return ve.DefaultDispatcher().Data(service, d, DataSourceVolcengineKafkaGroups()) +} diff --git a/volcengine/kafka/kafka_group/resource_volcengine_kafka_group.go b/volcengine/kafka/kafka_group/resource_volcengine_kafka_group.go new file mode 100644 index 00000000..3d11c414 --- /dev/null +++ b/volcengine/kafka/kafka_group/resource_volcengine_kafka_group.go @@ -0,0 +1,111 @@ +package kafka_group + +import ( + "fmt" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +/* + +Import +KafkaGroup can be imported using the instance_id:group_id, e.g. +``` +$ terraform import volcengine_kafka_group.default kafka-****x:groupId +``` + +*/ + +func ResourceVolcengineKafkaGroup() *schema.Resource { + resource := &schema.Resource{ + Create: resourceVolcengineKafkaGroupCreate, + Read: resourceVolcengineKafkaGroupRead, + Update: resourceVolcengineKafkaGroupUpdate, + Delete: resourceVolcengineKafkaGroupDelete, + Importer: &schema.ResourceImporter{ + State: func(data *schema.ResourceData, i interface{}) ([]*schema.ResourceData, error) { + items := strings.Split(data.Id(), ":") + if len(items) != 2 { + return []*schema.ResourceData{data}, fmt.Errorf("import id must split with ':'") + } + if err := data.Set("instance_id", items[0]); err != nil { + return []*schema.ResourceData{data}, err + } + if err := data.Set("group_id", items[1]); err != nil { + return []*schema.ResourceData{data}, err + } + return []*schema.ResourceData{data}, nil + }, + }, + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The instance id of kafka group.", + }, + "group_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The id of kafka group.", + }, + "description": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The description of kafka group.", + }, + "state": { + Type: schema.TypeString, + Computed: true, + Description: "The state of kafka group.", + }, + }, + } + return resource +} + +func resourceVolcengineKafkaGroupCreate(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaGroupService(meta.(*ve.SdkClient)) + err = ve.DefaultDispatcher().Create(service, d, ResourceVolcengineKafkaGroup()) + if err != nil { + return fmt.Errorf("error on creating kafka_group %q, %s", d.Id(), err) + } + return resourceVolcengineKafkaGroupRead(d, meta) +} + +func resourceVolcengineKafkaGroupRead(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaGroupService(meta.(*ve.SdkClient)) + err = ve.DefaultDispatcher().Read(service, d, ResourceVolcengineKafkaGroup()) + if err != nil { + return fmt.Errorf("error on reading kafka_group %q, %s", d.Id(), err) + } + return err +} + +func resourceVolcengineKafkaGroupUpdate(d *schema.ResourceData, meta interface{}) (err error) { + service := 
NewKafkaGroupService(meta.(*ve.SdkClient)) + err = ve.DefaultDispatcher().Update(service, d, ResourceVolcengineKafkaGroup()) + if err != nil { + return fmt.Errorf("error on updating kafka_group %q, %s", d.Id(), err) + } + return resourceVolcengineKafkaGroupRead(d, meta) +} + +func resourceVolcengineKafkaGroupDelete(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaGroupService(meta.(*ve.SdkClient)) + err = ve.DefaultDispatcher().Delete(service, d, ResourceVolcengineKafkaGroup()) + if err != nil { + return fmt.Errorf("error on deleting kafka_group %q, %s", d.Id(), err) + } + return err +} diff --git a/volcengine/kafka/kafka_group/service_volcengine_kafka_group.go b/volcengine/kafka/kafka_group/service_volcengine_kafka_group.go new file mode 100644 index 00000000..7e513e73 --- /dev/null +++ b/volcengine/kafka/kafka_group/service_volcengine_kafka_group.go @@ -0,0 +1,258 @@ +package kafka_group + +import ( + "encoding/json" + "errors" + "fmt" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" + "github.com/volcengine/terraform-provider-volcengine/logger" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_instance" +) + +type VolcengineKafkaGroupService struct { + Client *ve.SdkClient +} + +func NewKafkaGroupService(c *ve.SdkClient) *VolcengineKafkaGroupService { + return &VolcengineKafkaGroupService{ + Client: c, + } +} + +func (s *VolcengineKafkaGroupService) GetClient() *ve.SdkClient { + return s.Client +} + +func (s *VolcengineKafkaGroupService) ReadResources(m map[string]interface{}) (data []interface{}, err error) { + var ( + resp *map[string]interface{} + results interface{} + ok bool + ) + return ve.WithPageNumberQuery(m, "PageSize", "PageNumber", 100, 1, func(condition map[string]interface{}) ([]interface{}, error) { + action := "DescribeGroups" + + bytes, _ := json.Marshal(condition) + logger.Debug(logger.ReqFormat, action, string(bytes)) + if condition == nil { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), nil) + if err != nil { + return data, err + } + } else { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &condition) + if err != nil { + return data, err + } + } + respBytes, _ := json.Marshal(resp) + logger.Debug(logger.RespFormat, action, condition, string(respBytes)) + results, err = ve.ObtainSdkValue("Result.GroupsInfo", *resp) + if err != nil { + return data, err + } + if results == nil { + results = []interface{}{} + } + if data, ok = results.([]interface{}); !ok { + return data, errors.New("Result.GroupsInfo is not Slice") + } + return data, err + }) +} + +func (s *VolcengineKafkaGroupService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) { + var ( + results []interface{} + ok bool + ) + if id == "" { + id = s.ReadResourceId(resourceData.Id()) + } + ids := strings.Split(id, ":") + if len(ids) != 2 { + return data, fmt.Errorf("the id format must be 'instance_id:group_id'") + } + req := map[string]interface{}{ + "InstanceId": ids[0], + "GroupId": ids[1], + } + results, err = s.ReadResources(req) + if err != nil { + return data, err + } + for _, v := range results { + groupMap := make(map[string]interface{}) + if groupMap, ok = v.(map[string]interface{}); !ok { + return nil, errors.New("Value is not map ") + } + if groupMap["GroupId"] == ids[1] { // match by GroupId + data = 
groupMap + break + } + } + if len(data) == 0 { + return data, fmt.Errorf("kafka_group %s not exist ", id) + } + return data, err +} + +func (s *VolcengineKafkaGroupService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf { + return nil +} + +func (VolcengineKafkaGroupService) WithResourceResponseHandlers(d map[string]interface{}) []ve.ResourceResponseHandler { + handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) { + return d, nil, nil + } + return []ve.ResourceResponseHandler{handler} +} + +func (s *VolcengineKafkaGroupService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + callback := ve.Callback{ + Call: ve.SdkCall{ + Action: "CreateGroup", + ContentType: ve.ContentTypeJson, + ConvertMode: ve.RequestConvertAll, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.ReqFormat, call.Action, call.SdkParam) + resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + logger.Debug(logger.RespFormat, call.Action, resp, err) + return resp, err + }, + AfterCall: func(d *schema.ResourceData, client *ve.SdkClient, resp *map[string]interface{}, call ve.SdkCall) error { + d.SetId(fmt.Sprintf("%v:%v", d.Get("instance_id"), d.Get("group_id"))) + return nil + }, + LockId: func(d *schema.ResourceData) string { + return d.Get("instance_id").(string) + }, + ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{ + kafka_instance.NewKafkaInstanceService(s.Client): { + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutCreate), + ResourceId: resourceData.Get("instance_id").(string), + }, + }, + }, + } + return []ve.Callback{callback} +} + +func (s *VolcengineKafkaGroupService) ModifyResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + callback := ve.Callback{ + Call: ve.SdkCall{ + Action: "ModifyGroup", + ContentType: ve.ContentTypeJson, + ConvertMode: ve.RequestConvertInConvert, + Convert: map[string]ve.RequestConvert{ + "description": { + TargetField: "Description", + }, + }, + BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) { + if len(*call.SdkParam) > 0 { + (*call.SdkParam)["InstanceId"] = d.Get("instance_id") + (*call.SdkParam)["GroupId"] = d.Get("group_id") + return true, nil + } + return false, nil + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.ReqFormat, call.Action, call.SdkParam) + resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + logger.Debug(logger.RespFormat, call.Action, resp, err) + return resp, err + }, + LockId: func(d *schema.ResourceData) string { + return d.Get("instance_id").(string) + }, + ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{ + kafka_instance.NewKafkaInstanceService(s.Client): { + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutUpdate), + ResourceId: resourceData.Get("instance_id").(string), + }, + }, + }, + } + return []ve.Callback{callback} +} + +func (s *VolcengineKafkaGroupService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback { + ids := strings.Split(resourceData.Id(), ":") + callback := ve.Callback{ + Call: ve.SdkCall{ + Action: "DeleteGroup", + ConvertMode: 
ve.RequestConvertIgnore, + ContentType: ve.ContentTypeJson, + SdkParam: &map[string]interface{}{ + "InstanceID": ids[0], + "GroupID": ids[1], + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.ReqFormat, call.Action, call.SdkParam) + return s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + }, + AfterCall: func(d *schema.ResourceData, client *ve.SdkClient, resp *map[string]interface{}, call ve.SdkCall) error { + return ve.CheckResourceUtilRemoved(d, s.ReadResource, 5*time.Minute) + }, + CallError: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall, baseErr error) error { + // retry on error + return resource.Retry(15*time.Minute, func() *resource.RetryError { + _, callErr := s.ReadResource(d, "") + if callErr != nil { + if ve.ResourceNotFoundError(callErr) { + return nil + } else { + return resource.NonRetryableError(fmt.Errorf("error on reading kafka group on delete %q, %w", d.Id(), callErr)) + } + } + _, callErr = call.ExecuteCall(d, client, call) + if callErr == nil { + return nil + } + return resource.RetryableError(callErr) + }) + }, + LockId: func(d *schema.ResourceData) string { + return d.Get("instance_id").(string) + }, + ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{ + kafka_instance.NewKafkaInstanceService(s.Client): { + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutUpdate), + ResourceId: resourceData.Get("instance_id").(string), + }, + }, + }, + } + return []ve.Callback{callback} +} + +func (s *VolcengineKafkaGroupService) DatasourceResources(*schema.ResourceData, *schema.Resource) ve.DataSourceInfo { + return ve.DataSourceInfo{ + NameField: "GroupId", + CollectField: "groups", + ContentType: ve.ContentTypeJson, + } +} + +func (s *VolcengineKafkaGroupService) ReadResourceId(id string) string { + return id +} + +func getUniversalInfo(actionName string) ve.UniversalInfo { + return ve.UniversalInfo{ + ServiceName: "kafka", + Version: "2022-05-01", + HttpMethod: ve.POST, + ContentType: ve.ApplicationJSON, + Action: actionName, + } +} diff --git a/volcengine/kafka/kafka_instance/data_source_volcengine_kafka_instances.go b/volcengine/kafka/kafka_instance/data_source_volcengine_kafka_instances.go new file mode 100644 index 00000000..8df784c6 --- /dev/null +++ b/volcengine/kafka/kafka_instance/data_source_volcengine_kafka_instances.go @@ -0,0 +1,319 @@ +package kafka_instance + +import ( + "bytes" + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +var TagsHash = func(v interface{}) int { + if v == nil { + return hashcode.String("") + } + m := v.(map[string]interface{}) + var ( + buf bytes.Buffer + ) + buf.WriteString(fmt.Sprintf("%v#%v", m["key"], m["value"])) + return hashcode.String(buf.String()) +} + +func DataSourceVolcengineKafkaInstances() *schema.Resource { + return &schema.Resource{ + Read: dataSourceVolcengineKafkaInstancesRead, + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Optional: true, + Description: "The id of instance.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + Description: "The name of instance.", + }, + "instance_status": { + Type: schema.TypeString, + Optional: true, + Description: "The status of instance.", + }, + "zone_id": { + Type: schema.TypeString, + Optional: true, + 
Description: "The zone id of instance.", + }, + "tags": { + Type: schema.TypeSet, + Optional: true, + Description: "The tags of instance.", + Set: TagsHash, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Required: true, + Description: "The key of tag.", + }, + "value": { + Type: schema.TypeString, + Required: true, + Description: "The value of tag.", + }, + }, + }, + }, + "output_file": { + Type: schema.TypeString, + Optional: true, + Description: "File name where to save data source results.", + }, + "total_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The total count of query.", + }, + "instances": { + Description: "The collection of query.", + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "account_id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of account.", + }, + "compute_spec": { + Type: schema.TypeString, + Computed: true, + Description: "The compute spec of instance.", + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: "The create time of instance.", + }, + "eip_id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of eip.", + }, + "instance_description": { + Type: schema.TypeString, + Computed: true, + Description: "The description of instance.", + }, + "instance_id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of instance.", + }, + "instance_name": { + Type: schema.TypeString, + Computed: true, + Description: "The name of instance.", + }, + "instance_status": { + Type: schema.TypeString, + Computed: true, + Description: "The status of instance.", + }, + "id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of instance.", + }, + "region_id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of region.", + }, + "zone_id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of zone.", + }, + "vpc_id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of vpc.", + }, + "version": { + Type: schema.TypeString, + Computed: true, + Description: "The version of instance.", + }, + "subnet_id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of subnet.", + }, + "storage_type": { + Type: schema.TypeString, + Computed: true, + Description: "The storage type of instance.", + }, + "storage_space": { + Type: schema.TypeInt, + Computed: true, + Description: "The storage space of instance.", + }, + "usable_partition_number": { + Type: schema.TypeInt, + Computed: true, + Description: "The usable partition number of instance.", + }, + "used_group_number": { + Type: schema.TypeInt, + Computed: true, + Description: "The used group number of instance.", + }, + "used_partition_number": { + Type: schema.TypeInt, + Computed: true, + Description: "The used partition number of instance.", + }, + "used_topic_number": { + Type: schema.TypeInt, + Computed: true, + Description: "The used topic number of instance.", + }, + "used_storage_space": { + Type: schema.TypeInt, + Computed: true, + Description: "The used storage space of instance.", + }, + "private_domain_on_public": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether the private domain on public network is enabled.", + }, + "project_name": { + Type: schema.TypeString, + Computed: true, + Description: "The name of project.", + }, + "tags": { + Type: schema.TypeSet, + Computed: true, + Description: "The Tags of 
instance.", + Set: TagsHash, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "key": { + Type: schema.TypeString, + Computed: true, + Description: "The key of tags.", + }, + "value": { + Type: schema.TypeString, + Computed: true, + Description: "The value of tags.", + }, + }, + }, + }, + // charge detail info + "charge_type": { + Type: schema.TypeString, + Computed: true, + Description: "The charge type of instance.", + }, + "charge_status": { + Type: schema.TypeString, + Computed: true, + Description: "The charge status of instance.", + }, + "charge_start_time": { + Type: schema.TypeString, + Computed: true, + Description: "The charge start time of instance.", + }, + "charge_expire_time": { + Type: schema.TypeString, + Computed: true, + Description: "The charge expire time of instance.", + }, + "overdue_time": { + Type: schema.TypeString, + Computed: true, + Description: "The overdue time of instance.", + }, + "overdue_reclaim_time": { + Type: schema.TypeString, + Computed: true, + Description: "The overdue reclaim time of instance.", + }, + "period_unit": { + Type: schema.TypeString, + Computed: true, + Description: "The period unit of instance.", + }, + "auto_renew": { + Type: schema.TypeBool, + Computed: true, + Description: "The auto renew status of instance.", + }, + "connection_info": { + Type: schema.TypeList, + Computed: true, + Description: "Connection info of the instance.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "endpoint_type": { + Type: schema.TypeString, + Computed: true, + Description: "The endpoint type of instance.", + }, + "network_type": { + Type: schema.TypeString, + Computed: true, + Description: "The network type of instance.", + }, + "internal_endpoint": { + Type: schema.TypeString, + Computed: true, + Description: "The internal endpoint of instance.", + }, + "public_endpoint": { + Type: schema.TypeString, + Computed: true, + Description: "The public endpoint of instance.", + }, + }, + }, + }, + // parameters + "parameters": { + Type: schema.TypeList, + Computed: true, + Description: "Parameters of the instance.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "parameter_name": { + Type: schema.TypeString, + Computed: true, + Description: "Parameter name.", + }, + "parameter_value": { + Type: schema.TypeString, + Computed: true, + Description: "Parameter value.", + }, + }, + }, + }, + }, + }, + }, + }, + } +} + +func dataSourceVolcengineKafkaInstancesRead(d *schema.ResourceData, meta interface{}) error { + service := NewKafkaInstanceService(meta.(*ve.SdkClient)) + return service.Dispatcher.Data(service, d, DataSourceVolcengineKafkaInstances()) +} diff --git a/volcengine/kafka/kafka_instance/resource_volcengine_kafka_instance.go b/volcengine/kafka/kafka_instance/resource_volcengine_kafka_instance.go new file mode 100644 index 00000000..f73e3d10 --- /dev/null +++ b/volcengine/kafka/kafka_instance/resource_volcengine_kafka_instance.go @@ -0,0 +1,265 @@ +package kafka_instance + +import ( + "bytes" + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +/* + +Import +KafkaInstance can be imported using the id, e.g. 
+``` +$ terraform import volcengine_kafka_instance.default kafka-insbjwbbwb +``` + +*/ + +func ResourceVolcengineKafkaInstance() *schema.Resource { + resource := &schema.Resource{ + Create: resourceVolcengineKafkaInstanceCreate, + Read: resourceVolcengineKafkaInstanceRead, + Update: resourceVolcengineKafkaInstanceUpdate, + Delete: resourceVolcengineKafkaInstanceDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + Schema: map[string]*schema.Schema{ + "version": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The version of instance, the value can be `2.2.2` or `2.8.2`.", + }, + "compute_spec": { + Type: schema.TypeString, + Required: true, + Description: "The compute spec of instance.", + }, + "subnet_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The subnet id of instance.", + }, + "user_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The user name of instance. " + + "When importing resources, this attribute will not be imported. If this attribute is set, please use lifecycle and ignore_changes to ignore changes to this field.", + }, + "user_password": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Sensitive: true, + Description: "The user password of instance. " + + "When importing resources, this attribute will not be imported. If this attribute is set, please use lifecycle and ignore_changes to ignore changes to this field.", + }, + "storage_space": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + Description: "The storage space of instance.", + }, + "partition_number": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + Description: "The partition number of instance.", + }, + "storage_type": { + Type: schema.TypeString, + Optional: true, + Default: "ESSD_FlexPL", + ForceNew: true, + Description: "The storage type of instance. The value can be ESSD_FlexPL or ESSD_PL0.", + }, + "project_name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The project name of instance.", + }, + "instance_name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The name of instance.", + }, + "need_rebalance": { + Type: schema.TypeBool, + Optional: true, + Default: false, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + if d.Id() == "" { + return true + } + if !d.HasChange("compute_spec") { + return true + } + return false + }, + Description: "Whether to enable rebalance. 
+			"storage_space": {
+				Type:        schema.TypeInt,
+				Optional:    true,
+				Computed:    true,
+				Description: "The storage space of instance.",
+			},
+			"partition_number": {
+				Type:        schema.TypeInt,
+				Optional:    true,
+				Computed:    true,
+				Description: "The partition number of instance.",
+			},
+			"storage_type": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				Default:     "ESSD_FlexPL",
+				ForceNew:    true,
+				Description: "The storage type of instance. The value can be ESSD_FlexPL or ESSD_PL0.",
+			},
+			"project_name": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				Computed:    true,
+				Description: "The project name of instance.",
+			},
+			"instance_name": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				Computed:    true,
+				Description: "The name of instance.",
+			},
+			"need_rebalance": {
+				Type:     schema.TypeBool,
+				Optional: true,
+				Default:  false,
+				DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
+					if d.Id() == "" {
+						return true
+					}
+					if !d.HasChange("compute_spec") {
+						return true
+					}
+					return false
+				},
+				Description: "Whether to enable rebalance. Only takes effect on modification when the compute_spec field is changed.",
+			},
+			"rebalance_time": {
+				Type:     schema.TypeString,
+				Optional: true,
+				DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
+					if d.Id() == "" {
+						return true
+					}
+					// this parameter only takes effect when need_rebalance is true
+					if !d.HasChange("compute_spec") || !d.Get("need_rebalance").(bool) {
+						return true
+					}
+					return false
+				},
+				Description: "The rebalance time.",
+			},
+			"instance_description": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				Computed:    true,
+				Description: "The description of instance.",
+			},
+			"charge_type": {
+				Type:         schema.TypeString,
+				Required:     true,
+				ValidateFunc: validation.StringInSlice([]string{"PostPaid", "PrePaid"}, false),
+				Description:  "The charge type of instance, the value can be `PrePaid` or `PostPaid`.",
+			},
+			"auto_renew": {
+				Type:     schema.TypeBool,
+				Optional: true,
+				DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
+					// changes of auto_renew are ignored when the charge type has not changed
+					if !d.HasChange("charge_type") {
+						return true
+					}
+					if d.Get("charge_type").(string) == "PostPaid" {
+						return true
+					}
+					return false
+				},
+				Description: "The auto renew flag of instance. Only effective when charge_type is PrePaid. Default is false.",
+			},
+			"period": {
+				Type:     schema.TypeInt,
+				Optional: true,
+				DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
+					// changes of period are ignored when the charge type has not changed
+					if !d.HasChange("charge_type") {
+						return true
+					}
+					if d.Get("charge_type").(string) == "PostPaid" {
+						return true
+					}
+					return false
+				},
+				Description: "The period of instance. Only effective when charge_type is PrePaid. Unit is Month.",
+			},
+			"tags": {
+				Type:        schema.TypeSet,
+				Optional:    true,
+				Description: "The tags of instance.",
+				Set:         TagsHash,
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"key": {
+							Type:        schema.TypeString,
+							Required:    true,
+							Description: "The Key of Tags.",
+						},
+						"value": {
+							Type:        schema.TypeString,
+							Required:    true,
+							Description: "The Value of Tags.",
+						},
+					},
+				},
+			},
+			"parameters": {
+				Type:        schema.TypeSet,
+				Optional:    true,
+				Set:         parameterHash,
+				Description: "Parameters of the instance.",
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"parameter_name": {
+							Type:        schema.TypeString,
+							Required:    true,
+							Description: "Parameter name.",
+						},
+						"parameter_value": {
+							Type:        schema.TypeString,
+							Required:    true,
+							Description: "Parameter value.",
+						},
+					},
+				},
+			},
+		},
+	}
+	return resource
+}
+
+func resourceVolcengineKafkaInstanceCreate(d *schema.ResourceData, meta interface{}) (err error) {
+	service := NewKafkaInstanceService(meta.(*ve.SdkClient))
+	err = service.Dispatcher.Create(service, d, ResourceVolcengineKafkaInstance())
+	if err != nil {
+		return fmt.Errorf("error on creating kafka_instance %q, %s", d.Id(), err)
+	}
+	return resourceVolcengineKafkaInstanceRead(d, meta)
+}
+
+func resourceVolcengineKafkaInstanceRead(d *schema.ResourceData, meta interface{}) (err error) {
+	service := NewKafkaInstanceService(meta.(*ve.SdkClient))
+	err = service.Dispatcher.Read(service, d, ResourceVolcengineKafkaInstance())
+	if err != nil {
+		return fmt.Errorf("error on reading kafka_instance %q, %s", d.Id(), err)
+	}
+	return err
+}
+
+func resourceVolcengineKafkaInstanceUpdate(d *schema.ResourceData, meta interface{}) (err error) {
+	service := NewKafkaInstanceService(meta.(*ve.SdkClient))
+	err = service.Dispatcher.Update(service, d, ResourceVolcengineKafkaInstance())
+	if err != nil {
+		return fmt.Errorf("error on updating kafka_instance %q, %s", d.Id(), err)
+	}
+	return resourceVolcengineKafkaInstanceRead(d, meta)
+}
+
+func resourceVolcengineKafkaInstanceDelete(d *schema.ResourceData, meta interface{}) (err error) {
+	service := NewKafkaInstanceService(meta.(*ve.SdkClient))
+	err = service.Dispatcher.Delete(service, d, ResourceVolcengineKafkaInstance())
+	if err != nil {
+		return fmt.Errorf("error on deleting kafka_instance %q, %s", d.Id(), err)
+	}
+	return err
+}
+
+// parameterHash hashes a parameters set entry by its name and value.
+var parameterHash = func(v interface{}) int {
+	if v == nil {
+		return hashcode.String("")
+	}
+	m := v.(map[string]interface{})
+	var (
+		buf bytes.Buffer
+	)
+	buf.WriteString(fmt.Sprintf("%v#%v", m["parameter_name"], m["parameter_value"]))
+	return hashcode.String(buf.String())
+}
diff --git a/volcengine/kafka/kafka_instance/service_volcengine_kafka_instance.go b/volcengine/kafka/kafka_instance/service_volcengine_kafka_instance.go
new file mode 100644
index 00000000..dd302904
--- /dev/null
+++ b/volcengine/kafka/kafka_instance/service_volcengine_kafka_instance.go
@@ -0,0 +1,609 @@
+package kafka_instance
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"time"
+
+	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+	ve "github.com/volcengine/terraform-provider-volcengine/common"
+	"github.com/volcengine/terraform-provider-volcengine/logger"
+	"github.com/volcengine/volcengine-go-sdk/service/vpc"
+)
+
+type VolcengineKafkaInstanceService struct {
+	Client     *ve.SdkClient
+	Dispatcher *ve.Dispatcher
+}
+
+func NewKafkaInstanceService(c *ve.SdkClient) *VolcengineKafkaInstanceService {
+	return &VolcengineKafkaInstanceService{
+		Client:     c,
+		Dispatcher: &ve.Dispatcher{},
+	}
+}
+
+func (s *VolcengineKafkaInstanceService) GetClient() *ve.SdkClient {
+	return s.Client
+}
+
+func (s *VolcengineKafkaInstanceService) ReadResources(condition map[string]interface{}) (data []interface{}, err error) {
+	var (
+		resp    *map[string]interface{}
+		results interface{}
+	)
+	if v, ok := condition["Tags"]; ok {
+		if len(v.(map[string]interface{})) == 0 {
+			delete(condition, "Tags")
+		}
+	}
+	return ve.WithPageNumberQuery(condition, "PageSize", "PageNumber", 100, 1, func(condition map[string]interface{}) ([]interface{}, error) {
+		action := "DescribeInstances"
+
+		bytes, _ := json.Marshal(condition)
+		logger.Debug(logger.ReqFormat, action, string(bytes))
+		if condition == nil {
+			resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), nil)
+			if err != nil {
+				return data, err
+			}
+		} else {
+			resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &condition)
+			if err != nil {
+				return data, err
+			}
+		}
+		respBytes, _ := json.Marshal(resp)
+		logger.Debug(logger.RespFormat, action, condition, string(respBytes))
+		results, err = ve.ObtainSdkValue("Result.InstancesInfo", *resp)
+		if err != nil {
+			return data, err
+		}
+		if results == nil {
+			results = []interface{}{}
+		}
+
+		for _, element := range results.([]interface{}) {
+			instance := element.(map[string]interface{})
+			// flatten ChargeDetail into the instance map
+			chargeInfo := instance["ChargeDetail"].(map[string]interface{})
+			for k, v := range chargeInfo {
+				instance[k] = v
+			}
+			delete(instance, "ChargeDetail")
+
+			// update tags
+			if v, ok := instance["Tags"]; ok {
+				var tags []interface{}
+				for k, v := range v.(map[string]interface{}) {
+					tags = append(tags, map[string]interface{}{
+						"Key":   k,
+						"Value": v,
+					})
+				}
+				instance["Tags"] = tags
+			}
+
instance["InstanceId"], + } + logger.Debug(logger.ReqFormat, "DescribeInstanceDetail", req) + detail, err := s.Client.UniversalClient.DoCall(getUniversalInfo("DescribeInstanceDetail"), &req) + if err != nil { + return data, err + } + logger.Debug(logger.RespFormat, "DescribeInstanceDetail", req, *detail) + connection, err := ve.ObtainSdkValue("Result.ConnectionInfo", *detail) + if err != nil { + return data, err + } + instance["ConnectionInfo"] = connection + params, err := ve.ObtainSdkValue("Result.Parameters", *detail) + if err != nil { + return data, err + } + paramsMap := make(map[string]interface{}) + if err = json.Unmarshal([]byte(params.(string)), ¶msMap); err != nil { + return data, err + } + var paramsList []interface{} + for k, v := range paramsMap { + paramsList = append(paramsList, map[string]interface{}{ + "ParameterName": k, + "ParameterValue": v, + }) + } + instance["Parameters"] = paramsList + } + return results.([]interface{}), err + }) +} + +func (s *VolcengineKafkaInstanceService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) { + var ( + results []interface{} + ok bool + ) + if id == "" { + id = s.ReadResourceId(resourceData.Id()) + } + req := map[string]interface{}{ + "InstanceId": id, + } + results, err = s.ReadResources(req) + if err != nil { + return data, err + } + for _, v := range results { + if data, ok = v.(map[string]interface{}); !ok { + return data, errors.New("Value is not map ") + } + } + if len(data) == 0 { + return data, fmt.Errorf("kafka_instance %s not exist ", id) + } + // parameters 会有默认参数,防止不一致产生 + delete(data, "Parameters") + if parameterSet, ok := resourceData.GetOk("parameters"); ok { + if set, ok := parameterSet.(*schema.Set); ok { + data["Parameters"] = set.List() + } + } + return data, err +} + +func (s *VolcengineKafkaInstanceService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf { + return &resource.StateChangeConf{ + Pending: []string{}, + // 15s后才能查询 ChargeInfo + Delay: 15 * time.Second, + MinTimeout: 1 * time.Second, + Target: target, + Timeout: timeout, + Refresh: func() (result interface{}, state string, err error) { + var ( + d map[string]interface{} + status interface{} + failStates []string + ) + failStates = append(failStates, "CreateFailed", "Error", "Fail", "Failed") + + if err = resource.Retry(20*time.Minute, func() *resource.RetryError { + d, err = s.ReadResource(resourceData, id) + if err != nil { + if ve.ResourceNotFoundError(err) { + return resource.RetryableError(err) + } else { + return resource.NonRetryableError(err) + } + } + return nil + }); err != nil { + return nil, "", err + } + + d, err = s.ReadResource(resourceData, id) + if err != nil { + return nil, "", err + } + status, err = ve.ObtainSdkValue("InstanceStatus", d) + if err != nil { + return nil, "", err + } + for _, v := range failStates { + if v == status.(string) { + return nil, "", fmt.Errorf("kafka_instance status error, status: %s", status.(string)) + } + } + return d, status.(string), err + }, + } +} + +func (s *VolcengineKafkaInstanceService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + callback := ve.Callback{ + Call: ve.SdkCall{ + Action: "CreateInstance", + ConvertMode: ve.RequestConvertAll, + ContentType: ve.ContentTypeJson, + Convert: map[string]ve.RequestConvert{ + "parameters": { + ConvertType: ve.ConvertJsonObjectArray, + }, + "tags": { + ConvertType: 
+func (s *VolcengineKafkaInstanceService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback {
+	callback := ve.Callback{
+		Call: ve.SdkCall{
+			Action:      "CreateInstance",
+			ConvertMode: ve.RequestConvertAll,
+			ContentType: ve.ContentTypeJson,
+			Convert: map[string]ve.RequestConvert{
+				"parameters": {
+					ConvertType: ve.ConvertJsonObjectArray,
+				},
+				"tags": {
+					ConvertType: ve.ConvertJsonObjectArray,
+				},
+			},
+			BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) {
+				subnetId := (*call.SdkParam)["SubnetId"].(string)
+				subnet, err := s.Client.VpcClient.DescribeSubnetAttributes(&vpc.DescribeSubnetAttributesInput{SubnetId: &subnetId})
+				if err != nil {
+					return false, err
+				}
+				(*call.SdkParam)["ZoneId"] = *subnet.ZoneId
+				(*call.SdkParam)["VpcId"] = *subnet.VpcId
+				// assemble charge info
+				charge := make(map[string]interface{})
+				if (*call.SdkParam)["ChargeType"] == "PrePaid" {
+					if (*call.SdkParam)["Period"] == nil || (*call.SdkParam)["Period"].(int) < 1 {
+						return false, fmt.Errorf("Instance Charge Type is PrePaid. Period must be set to at least 1. ")
+					}
+					charge["PeriodUnit"] = "Month"
+				}
+				charge["ChargeType"] = (*call.SdkParam)["ChargeType"]
+				delete(*call.SdkParam, "ChargeType")
+				if v, ok := (*call.SdkParam)["AutoRenew"]; ok {
+					charge["AutoRenew"] = v
+					delete(*call.SdkParam, "AutoRenew")
+				}
+				if v, ok := (*call.SdkParam)["Period"]; ok {
+					charge["Period"] = v
+					delete(*call.SdkParam, "Period")
+				}
+				(*call.SdkParam)["ChargeInfo"] = charge
+				return true, nil
+			},
+			ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+				// convert tags to the map form the API expects
+				if v, ok := (*call.SdkParam)["Tags"]; ok {
+					tags := v.([]interface{})
+					if len(tags) > 0 {
+						temp := make(map[string]interface{})
+						for _, ele := range tags {
+							e := ele.(map[string]interface{})
+							temp[e["Key"].(string)] = e["Value"]
+						}
+						(*call.SdkParam)["Tags"] = temp
+					}
+				}
+				// convert parameters to a JSON string
+				if v, ok := (*call.SdkParam)["Parameters"]; ok {
+					params := v.([]interface{})
+					if len(params) > 0 {
+						temp := make(map[string]interface{})
+						for _, ele := range params {
+							e := ele.(map[string]interface{})
+							temp[e["ParameterName"].(string)] = e["ParameterValue"]
+						}
+						bytes, err := json.Marshal(&temp)
+						if err != nil {
+							return nil, err
+						}
+						(*call.SdkParam)["Parameters"] = string(bytes)
+					}
+				}
+
+				logger.Debug(logger.ReqFormat, call.Action, call.SdkParam)
+				resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+				logger.Debug(logger.RespFormat, call.Action, *resp)
+				return resp, err
+			},
+			AfterCall: func(d *schema.ResourceData, client *ve.SdkClient, resp *map[string]interface{}, call ve.SdkCall) error {
+				id, _ := ve.ObtainSdkValue("Result.InstanceId", *resp)
+				d.SetId(id.(string))
+				return nil
+			},
+			Refresh: &ve.StateRefresh{
+				Target:  []string{"Running"},
+				Timeout: resourceData.Timeout(schema.TimeoutCreate),
+			},
+		},
+	}
+	return []ve.Callback{callback}
+}
+
+func (VolcengineKafkaInstanceService) WithResourceResponseHandlers(d map[string]interface{}) []ve.ResourceResponseHandler {
+	handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) {
+		return d, nil, nil
+	}
+	return []ve.ResourceResponseHandler{handler}
+}
+
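+// ModifyResource emits one callback per changed argument group:
+// instance_name/instance_description -> ModifyInstanceAttributes,
+// compute_spec/storage_space/partition_number -> ModifyInstanceSpec,
+// parameters -> ModifyInstanceParameters, charge_type -> ModifyInstanceChargeType,
+// followed by the tag add/remove callbacks appended in setResourceTags.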
+func (s *VolcengineKafkaInstanceService) ModifyResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback {
+	var res []ve.Callback
+	if resourceData.HasChange("instance_name") || resourceData.HasChange("instance_description") {
+		res = append(res, ve.Callback{
+			Call: ve.SdkCall{
+				Action:      "ModifyInstanceAttributes",
+				ConvertMode: ve.RequestConvertInConvert,
+				ContentType: ve.ContentTypeJson,
+				Convert: map[string]ve.RequestConvert{
+					"instance_name": {
+						TargetField: "InstanceName",
+					},
+					"instance_description": {
+						TargetField: "InstanceDescription",
+					},
+				},
+				BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) {
+					(*call.SdkParam)["InstanceId"] = d.Id()
+					return true, nil
+				},
+				ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+					logger.Debug(logger.ReqFormat, call.Action, *call.SdkParam)
+					resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+					logger.Debug(logger.RespFormat, call.Action, *resp)
+					return resp, err
+				},
+				Refresh: &ve.StateRefresh{
+					Target:  []string{"Running"},
+					Timeout: resourceData.Timeout(schema.TimeoutUpdate),
+				},
+			},
+		})
+	}
+	if resourceData.HasChanges("compute_spec", "storage_space", "partition_number") {
+		res = append(res, ve.Callback{
+			Call: ve.SdkCall{
+				Action:      "ModifyInstanceSpec",
+				ConvertMode: ve.RequestConvertInConvert,
+				ContentType: ve.ContentTypeJson,
+				Convert: map[string]ve.RequestConvert{
+					"compute_spec": {
+						TargetField: "ComputeSpec",
+					},
+					"storage_space": {
+						TargetField: "StorageSpace",
+					},
+					"partition_number": {
+						TargetField: "PartitionNumber",
+					},
+				},
+				BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) {
+					(*call.SdkParam)["InstanceId"] = d.Id()
+					if d.HasChange("compute_spec") { // rebalance options only apply when the compute spec of the instance changes
+						if v, ok := d.GetOkExists("need_rebalance"); ok {
+							(*call.SdkParam)["NeedRebalance"] = v
+						}
+						if v, ok := d.GetOkExists("rebalance_time"); ok {
+							(*call.SdkParam)["RebalanceTime"] = v
+						}
+					}
+					return true, nil
+				},
+				ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+					logger.Debug(logger.ReqFormat, call.Action, *call.SdkParam)
+					resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+					logger.Debug(logger.RespFormat, call.Action, *resp)
+					return resp, err
+				},
+				AfterCall: func(d *schema.ResourceData, client *ve.SdkClient, resp *map[string]interface{}, call ve.SdkCall) error {
+					time.Sleep(10 * time.Second)
+					return nil
+				},
+				Refresh: &ve.StateRefresh{
+					Target:  []string{"Running"},
+					Timeout: resourceData.Timeout(schema.TimeoutUpdate),
+				},
+			},
+		})
+	}
+
+	if resourceData.HasChange("parameters") {
+		parameterCallback := ve.Callback{
+			Call: ve.SdkCall{
+				Action:      "ModifyInstanceParameters",
+				ContentType: ve.ContentTypeJson,
+				ConvertMode: ve.RequestConvertInConvert,
+				Convert: map[string]ve.RequestConvert{
+					"parameters": {
+						ConvertType: ve.ConvertJsonObjectArray,
+						ForceGet:    true,
+					},
+				},
+				ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+					if _, exist := (*call.SdkParam)["Parameters"]; !exist {
+						return nil, nil
+					}
+					params := (*call.SdkParam)["Parameters"].([]interface{})
+					if len(params) == 0 {
+						return nil, nil
+					}
+					temp := make(map[string]interface{})
+					for _, ele := range params {
+						para := ele.(map[string]interface{})
+						temp[para["ParameterName"].(string)] = para["ParameterValue"]
+					}
+					bytes, err := json.Marshal(&temp)
+					if err != nil {
+						return nil, err
+					}
+					(*call.SdkParam)["Parameters"] = string(bytes)
+					(*call.SdkParam)["InstanceId"] = d.Id()
+
+					logger.Debug(logger.ReqFormat, call.Action, call.SdkParam)
+					return s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+				},
+				Refresh: &ve.StateRefresh{
+					Target:  []string{"Running"},
+					Timeout: resourceData.Timeout(schema.TimeoutUpdate),
+				},
+			},
+		}
+
+		res = append(res, parameterCallback)
+	}
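+	// Changing the billing method is one-way: ModifyInstanceChargeType only
+	// accepts PostPaid -> PrePaid, so a change back to PostPaid fails fast in
+	// BeforeCall below.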
+	if resourceData.HasChanges("charge_type") {
+		res = append(res, ve.Callback{
+			Call: ve.SdkCall{
+				Action:      "ModifyInstanceChargeType",
+				ConvertMode: ve.RequestConvertIgnore,
+				ContentType: ve.ContentTypeJson,
+				BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) {
+					// only converting PostPaid to PrePaid is supported
+					if d.Get("charge_type") == "PostPaid" {
+						return false, fmt.Errorf("only supports changing charge type from PostPaid to PrePaid")
+					}
+
+					if d.Get("charge_type") == "PrePaid" {
+						if d.Get("period") == nil || d.Get("period").(int) < 1 {
+							return false, fmt.Errorf("Instance Charge Type is PrePaid. Period must be set to at least 1. ")
+						}
+					}
+
+					(*call.SdkParam)["InstanceId"] = d.Id()
+					charge := make(map[string]interface{})
+					charge["PeriodUnit"] = "Month"
+					charge["AutoRenew"] = d.Get("auto_renew")
+					charge["Period"] = d.Get("period")
+					charge["ChargeType"] = d.Get("charge_type")
+					(*call.SdkParam)["ChargeInfo"] = charge
+					return true, nil
+				},
+				ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+					logger.Debug(logger.ReqFormat, call.Action, call.SdkParam)
+					resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+					logger.Debug(logger.RespFormat, call.Action, resp, err)
+					return resp, err
+				},
+				Refresh: &ve.StateRefresh{
+					Target:  []string{"Running"},
+					Timeout: resourceData.Timeout(schema.TimeoutUpdate),
+				},
+			},
+		})
+	}
+
+	// update tags
+	res = s.setResourceTags(resourceData, res)
+	return res
+}
+
+func (s *VolcengineKafkaInstanceService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback {
+	callback := ve.Callback{
+		Call: ve.SdkCall{
+			Action:      "DeleteInstance",
+			ConvertMode: ve.RequestConvertIgnore,
+			ContentType: ve.ContentTypeJson,
+			SdkParam: &map[string]interface{}{
+				"InstanceId": resourceData.Id(),
+			},
+			ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+				logger.Debug(logger.ReqFormat, call.Action, call.SdkParam)
+				return s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+			},
+		},
+	}
+	return []ve.Callback{callback}
+}
+
+func (s *VolcengineKafkaInstanceService) setResourceTags(resourceData *schema.ResourceData, callbacks []ve.Callback) []ve.Callback {
+	addedTags, removedTags, _, _ := ve.GetSetDifference("tags", resourceData, TagsHash, false)
+
+	removeCallback := ve.Callback{
+		Call: ve.SdkCall{
+			Action:      "RemoveTagsFromResource",
+			ConvertMode: ve.RequestConvertIgnore,
+			BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) {
+				if removedTags != nil && len(removedTags.List()) > 0 {
+					(*call.SdkParam)["InstanceIds"] = []string{resourceData.Id()}
+					(*call.SdkParam)["TagKeys"] = make([]string, 0)
+					for _, tag := range removedTags.List() {
+						(*call.SdkParam)["TagKeys"] = append((*call.SdkParam)["TagKeys"].([]string), tag.(map[string]interface{})["key"].(string))
+					}
+					return true, nil
+				}
+				return false, nil
+			},
+			ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+				logger.Debug(logger.ReqFormat, call.Action, call.SdkParam)
+				return s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+			},
+		},
+	}
+	callbacks = append(callbacks, removeCallback)
(*call.SdkParam)["InstanceIds"] = []string{resourceData.Id()} + (*call.SdkParam)["Tags"] = make([]map[string]interface{}, 0) + for _, tag := range addedTags.List() { + t := tag.(map[string]interface{}) + temp := make(map[string]interface{}) + temp["Key"] = t["key"].(string) + temp["Value"] = t["value"].(string) + (*call.SdkParam)["Tags"] = append((*call.SdkParam)["Tags"].([]map[string]interface{}), temp) + } + return true, nil + } + return false, nil + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.ReqFormat, call.Action, call.SdkParam) + return s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + }, + }, + } + callbacks = append(callbacks, addCallback) + + return callbacks +} + +func (s *VolcengineKafkaInstanceService) DatasourceResources(*schema.ResourceData, *schema.Resource) ve.DataSourceInfo { + return ve.DataSourceInfo{ + RequestConverts: map[string]ve.RequestConvert{ + // Map 类型 + "tags": { + Convert: func(data *schema.ResourceData, i interface{}) interface{} { + tags := i.(*schema.Set).List() + res := make(map[string]interface{}) + for _, ele := range tags { + tag := ele.(map[string]interface{}) + res[tag["key"].(string)] = []interface{}{tag["value"]} + } + return res + }, + }, + }, + NameField: "InstanceName", + IdField: "InstanceId", + CollectField: "instances", + ResponseConverts: map[string]ve.ResponseConvert{ + "InstanceId": { + TargetField: "id", + KeepDefault: true, + }, + }, + } +} + +func (s *VolcengineKafkaInstanceService) ReadResourceId(id string) string { + return id +} + +func (s *VolcengineKafkaInstanceService) ProjectTrn() *ve.ProjectTrn { + return &ve.ProjectTrn{ + ServiceName: "Kafka", + ResourceType: "instance", + ProjectResponseField: "ProjectName", + ProjectSchemaField: "project_name", + } +} + +func (s *VolcengineKafkaInstanceService) UnsubscribeInfo(resourceData *schema.ResourceData, resource *schema.Resource) (*ve.UnsubscribeInfo, error) { + info := ve.UnsubscribeInfo{ + InstanceId: s.ReadResourceId(resourceData.Id()), + } + if resourceData.Get("charge_type") == "PrePaid" { + info.Products = []string{"Message_Queue_for_Kafka"} + info.NeedUnsubscribe = true + } + return &info, nil +} + +func getUniversalInfo(actionName string) ve.UniversalInfo { + return ve.UniversalInfo{ + ServiceName: "kafka", + Version: "2022-05-01", + HttpMethod: ve.POST, + ContentType: ve.ApplicationJSON, + Action: actionName, + } +} diff --git a/volcengine/kafka/kafka_public_address/resource_volcengine_kafka_public_address.go b/volcengine/kafka/kafka_public_address/resource_volcengine_kafka_public_address.go new file mode 100644 index 00000000..2b3ba467 --- /dev/null +++ b/volcengine/kafka/kafka_public_address/resource_volcengine_kafka_public_address.go @@ -0,0 +1,104 @@ +package kafka_public_address + +import ( + "fmt" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +/* + +Import +KafkaPublicAddress can be imported using the instance_id:eip_id, e.g. 
+*/
+
+func ResourceVolcengineKafkaPublicAddress() *schema.Resource {
+	resource := &schema.Resource{
+		Create: resourceVolcengineKafkaPublicAddressCreate,
+		Read:   resourceVolcengineKafkaPublicAddressRead,
+		Delete: resourceVolcengineKafkaPublicAddressDelete,
+		Importer: &schema.ResourceImporter{
+			State: func(data *schema.ResourceData, i interface{}) ([]*schema.ResourceData, error) {
+				items := strings.Split(data.Id(), ":")
+				if len(items) != 2 {
+					return []*schema.ResourceData{data}, fmt.Errorf("import id must be split by ':'")
+				}
+				if err := data.Set("eip_id", items[1]); err != nil {
+					return []*schema.ResourceData{data}, err
+				}
+				if err := data.Set("instance_id", items[0]); err != nil {
+					return []*schema.ResourceData{data}, err
+				}
+				return []*schema.ResourceData{data}, nil
+			},
+		},
+		Timeouts: &schema.ResourceTimeout{
+			Create: schema.DefaultTimeout(30 * time.Minute),
+			Delete: schema.DefaultTimeout(30 * time.Minute),
+		},
+		Schema: map[string]*schema.Schema{
+			"instance_id": {
+				Type:        schema.TypeString,
+				Required:    true,
+				Description: "The id of kafka instance.",
+				ForceNew:    true,
+			},
+			"eip_id": {
+				Type:        schema.TypeString,
+				Required:    true,
+				Description: "The id of eip.",
+				ForceNew:    true,
+			},
+			"endpoint_type": {
+				Type:        schema.TypeString,
+				Computed:    true,
+				Description: "The endpoint type of instance.",
+			},
+			"network_type": {
+				Type:        schema.TypeString,
+				Computed:    true,
+				Description: "The network type of instance.",
+			},
+			"public_endpoint": {
+				Type:        schema.TypeString,
+				Computed:    true,
+				Description: "The public endpoint of instance.",
+			},
+		},
+	}
+	return resource
+}
+
+func resourceVolcengineKafkaPublicAddressCreate(d *schema.ResourceData, meta interface{}) (err error) {
+	service := NewKafkaInternetEnablerService(meta.(*ve.SdkClient))
+	err = service.Dispatcher.Create(service, d, ResourceVolcengineKafkaPublicAddress())
+	if err != nil {
+		return fmt.Errorf("error on creating kafka public address %q, %s", d.Id(), err)
+	}
+	return resourceVolcengineKafkaPublicAddressRead(d, meta)
+}
+
+func resourceVolcengineKafkaPublicAddressRead(d *schema.ResourceData, meta interface{}) (err error) {
+	service := NewKafkaInternetEnablerService(meta.(*ve.SdkClient))
+	err = service.Dispatcher.Read(service, d, ResourceVolcengineKafkaPublicAddress())
+	if err != nil {
+		return fmt.Errorf("error on reading kafka public address %q, %s", d.Id(), err)
+	}
+	return err
+}
+
+func resourceVolcengineKafkaPublicAddressDelete(d *schema.ResourceData, meta interface{}) (err error) {
+	service := NewKafkaInternetEnablerService(meta.(*ve.SdkClient))
+	err = service.Dispatcher.Delete(service, d, ResourceVolcengineKafkaPublicAddress())
+	if err != nil {
+		return fmt.Errorf("error on deleting kafka public address %q, %s", d.Id(), err)
+	}
+	return err
+}
diff --git a/volcengine/kafka/kafka_public_address/service_volcengine_kafka_public_address.go b/volcengine/kafka/kafka_public_address/service_volcengine_kafka_public_address.go
new file mode 100644
index 00000000..7255f814
--- /dev/null
+++ b/volcengine/kafka/kafka_public_address/service_volcengine_kafka_public_address.go
@@ -0,0 +1,168 @@
+package kafka_public_address
+
+import (
+	"fmt"
+	"strings"
+	"time"
+
+	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+	ve "github.com/volcengine/terraform-provider-volcengine/common"
+	"github.com/volcengine/terraform-provider-volcengine/logger"
+ "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_instance" +) + +type VolcengineKafkaPublicAddressService struct { + Client *ve.SdkClient + Dispatcher *ve.Dispatcher +} + +func NewKafkaInternetEnablerService(c *ve.SdkClient) *VolcengineKafkaPublicAddressService { + return &VolcengineKafkaPublicAddressService{ + Client: c, + Dispatcher: &ve.Dispatcher{}, + } +} + +func (s *VolcengineKafkaPublicAddressService) GetClient() *ve.SdkClient { + return s.Client +} + +func (s *VolcengineKafkaPublicAddressService) ReadResources(m map[string]interface{}) (data []interface{}, err error) { + return nil, nil +} + +func (s *VolcengineKafkaPublicAddressService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) { + var ( + resp *map[string]interface{} + ) + if id == "" { + id = s.ReadResourceId(resourceData.Id()) + } + ids := strings.Split(id, ":") + req := map[string]interface{}{ + "InstanceID": ids[0], + } + logger.Debug(logger.ReqFormat, "DescribeInstanceDetail", req) + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo("DescribeInstanceDetail"), &req) + if err != nil { + return data, err + } + logger.Debug(logger.RespFormat, "DescribeInstanceDetail", req, *resp) + eip, err := ve.ObtainSdkValue("Result.BasicInstanceInfo.EipId", *resp) + if err != nil { + return data, err + } + if eip == nil || eip != resourceData.Get("eip_id") { + return nil, fmt.Errorf("instance_id and eip not associate") + } + + connection, err := ve.ObtainSdkValue("Result.ConnectionInfo", *resp) + if err != nil { + return data, err + } + for _, ele := range connection.([]interface{}) { + conn := ele.(map[string]interface{}) + if conn["EndpointType"] == "SASL_SSL" { + return conn, nil + } + } + return nil, fmt.Errorf("instance_id and eip not associate") +} + +func (s *VolcengineKafkaPublicAddressService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf { + return nil +} + +func (s *VolcengineKafkaPublicAddressService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + callback := ve.Callback{ + Call: ve.SdkCall{ + Action: "CreatePublicAddress", + ConvertMode: ve.RequestConvertIgnore, + ContentType: ve.ContentTypeJson, + BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) { + (*call.SdkParam)["InstanceId"] = resourceData.Get("instance_id") + (*call.SdkParam)["EipId"] = resourceData.Get("eip_id") + return true, nil + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.RespFormat, call.Action, call.SdkParam) + resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + logger.Debug(logger.RespFormat, call.Action, resp, err) + return resp, err + }, + AfterCall: func(d *schema.ResourceData, client *ve.SdkClient, resp *map[string]interface{}, call ve.SdkCall) error { + d.SetId(fmt.Sprintf("%s:%s", resourceData.Get("instance_id"), resourceData.Get("eip_id"))) + return nil + }, + LockId: func(d *schema.ResourceData) string { + return d.Get("instance_id").(string) + }, + ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{ + kafka_instance.NewKafkaInstanceService(s.Client): { + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutCreate), + ResourceId: resourceData.Get("instance_id").(string), + }, + }, + }, + } + return []ve.Callback{callback} 
+func (VolcengineKafkaPublicAddressService) WithResourceResponseHandlers(d map[string]interface{}) []ve.ResourceResponseHandler {
+	handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) {
+		return d, nil, nil
+	}
+	return []ve.ResourceResponseHandler{handler}
+}
+
+func (s *VolcengineKafkaPublicAddressService) ModifyResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback {
+	return []ve.Callback{}
+}
+
+func (s *VolcengineKafkaPublicAddressService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback {
+	callback := ve.Callback{
+		Call: ve.SdkCall{
+			Action:      "DeletePublicAddress",
+			ConvertMode: ve.RequestConvertIgnore,
+			ContentType: ve.ContentTypeJson,
+			SdkParam: &map[string]interface{}{
+				"InstanceId": resourceData.Get("instance_id"),
+			},
+			ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+				logger.Debug(logger.ReqFormat, call.Action, call.SdkParam)
+				return s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+			},
+			LockId: func(d *schema.ResourceData) string {
+				return d.Get("instance_id").(string)
+			},
+			ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{
+				kafka_instance.NewKafkaInstanceService(s.Client): {
+					Target:     []string{"Running"},
+					Timeout:    resourceData.Timeout(schema.TimeoutDelete),
+					ResourceId: resourceData.Get("instance_id").(string),
+				},
+			},
+		},
+	}
+	return []ve.Callback{callback}
+}
+
+func (s *VolcengineKafkaPublicAddressService) DatasourceResources(*schema.ResourceData, *schema.Resource) ve.DataSourceInfo {
+	return ve.DataSourceInfo{}
+}
+
+func (s *VolcengineKafkaPublicAddressService) ReadResourceId(id string) string {
+	return id
+}
+
+func getUniversalInfo(actionName string) ve.UniversalInfo {
+	return ve.UniversalInfo{
+		ServiceName: "kafka",
+		Version:     "2022-05-01",
+		HttpMethod:  ve.POST,
+		ContentType: ve.ApplicationJSON,
+		Action:      actionName,
+	}
+}
diff --git a/volcengine/kafka/kafka_region/data_source_volcengine_kafka_regions.go b/volcengine/kafka/kafka_region/data_source_volcengine_kafka_regions.go
new file mode 100644
index 00000000..5307f2ba
--- /dev/null
+++ b/volcengine/kafka/kafka_region/data_source_volcengine_kafka_regions.go
@@ -0,0 +1,58 @@
+package kafka_region
+
+import (
+	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+	ve "github.com/volcengine/terraform-provider-volcengine/common"
+)
+
+func DataSourceVolcengineRegions() *schema.Resource {
+	return &schema.Resource{
+		Read: dataSourceVolcengineRegionsRead,
+		Schema: map[string]*schema.Schema{
+			"output_file": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				Description: "File name where to save data source results.",
+			},
+			"total_count": {
+				Type:        schema.TypeInt,
+				Computed:    true,
+				Description: "The total count of region query.",
+			},
+			"regions": {
+				Description: "The collection of region query.",
+				Type:        schema.TypeList,
+				Computed:    true,
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"region_id": {
+							Type:        schema.TypeString,
+							Computed:    true,
+							Description: "The id of the region.",
+						},
+						"region_name": {
+							Type:        schema.TypeString,
+							Computed:    true,
+							Description: "The name of region.",
+						},
+						"description": {
+							Type:        schema.TypeString,
+							Computed:    true,
+							Description: "The description of region.",
+						},
+						"status": {
+							Type:        schema.TypeString,
+							Computed:    true,
+							Description: "The status of region.",
+						},
+					},
+				},
+			},
+		},
+	}
+}
+
+func 
dataSourceVolcengineRegionsRead(d *schema.ResourceData, meta interface{}) error { + regionService := NewRegionService(meta.(*ve.SdkClient)) + return ve.DefaultDispatcher().Data(regionService, d, DataSourceVolcengineRegions()) +} diff --git a/volcengine/kafka/kafka_region/service_volcengine_kafka_region.go b/volcengine/kafka/kafka_region/service_volcengine_kafka_region.go new file mode 100644 index 00000000..f591db21 --- /dev/null +++ b/volcengine/kafka/kafka_region/service_volcengine_kafka_region.go @@ -0,0 +1,109 @@ +package kafka_region + +import ( + "errors" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" + "github.com/volcengine/terraform-provider-volcengine/logger" +) + +type VolcengineRegionService struct { + Client *ve.SdkClient +} + +func NewRegionService(c *ve.SdkClient) *VolcengineRegionService { + return &VolcengineRegionService{ + Client: c, + } +} + +func (s *VolcengineRegionService) GetClient() *ve.SdkClient { + return s.Client +} + +func getUniversalInfo(actionName string) ve.UniversalInfo { + return ve.UniversalInfo{ + ServiceName: "kafka", + Version: "2022-05-01", + HttpMethod: ve.POST, + ContentType: ve.ApplicationJSON, + Action: actionName, + } +} + +func (s *VolcengineRegionService) ReadResources(condition map[string]interface{}) ([]interface{}, error) { + var ( + resp *map[string]interface{} + results interface{} + ok bool + err error + data []interface{} + ) + action := "DescribeRegions" + logger.Debug(logger.ReqFormat, action, condition) + if condition == nil { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), nil) + } else { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &condition) + } + if err != nil { + return nil, err + } + logger.Debug(logger.RespFormat, action, condition, *resp) + + results, err = ve.ObtainSdkValue("Result.Regions", *resp) + if err != nil { + return nil, err + } + if results == nil { + results = make([]interface{}, 0) + } + + if data, ok = results.([]interface{}); !ok { + return nil, errors.New("Result.Regions is not Slice") + } + + return data, nil +} + +func (s *VolcengineRegionService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) { + return nil, nil +} + +func (s *VolcengineRegionService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf { + return nil +} + +func (s *VolcengineRegionService) WithResourceResponseHandlers(zone map[string]interface{}) []ve.ResourceResponseHandler { + handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) { + return zone, nil, nil + } + return []ve.ResourceResponseHandler{handler} +} + +func (s *VolcengineRegionService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineRegionService) ModifyResource(resourceData *schema.ResourceData, resource *schema.Resource) (callbacks []ve.Callback) { + return callbacks +} + +func (s *VolcengineRegionService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineRegionService) DatasourceResources(data *schema.ResourceData, resource *schema.Resource) ve.DataSourceInfo { + return ve.DataSourceInfo{ + NameField: "RegionName", + IdField: "RegionId", + 
CollectField: "regions", + } +} + +func (s *VolcengineRegionService) ReadResourceId(id string) string { + return id +} diff --git a/volcengine/kafka/kafka_sasl_user/data_source_volcengine_kafka_sasl_users.go b/volcengine/kafka/kafka_sasl_user/data_source_volcengine_kafka_sasl_users.go new file mode 100644 index 00000000..913ac382 --- /dev/null +++ b/volcengine/kafka/kafka_sasl_user/data_source_volcengine_kafka_sasl_users.go @@ -0,0 +1,73 @@ +package kafka_sasl_user + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +func DataSourceVolcengineKafkaSaslUsers() *schema.Resource { + return &schema.Resource{ + Read: dataSourceVolcengineKafkaSaslUsersRead, + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Required: true, + Description: "The id of instance.", + }, + "user_name": { + Type: schema.TypeString, + Optional: true, + Description: "The user name, support fuzzy matching.", + }, + "output_file": { + Type: schema.TypeString, + Optional: true, + Description: "File name where to save data source results.", + }, + "total_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The total count of query.", + }, + "users": { + Description: "The collection of query.", + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: "The create time.", + }, + "user_name": { + Type: schema.TypeString, + Computed: true, + Description: "The name of user.", + }, + "description": { + Type: schema.TypeString, + Computed: true, + Description: "The description of user.", + }, + "password_type": { + Type: schema.TypeString, + Computed: true, + Description: "The type of password.", + }, + "all_authority": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether this user has read and write permissions for all topics.", + }, + }, + }, + }, + }, + } +} + +func dataSourceVolcengineKafkaSaslUsersRead(d *schema.ResourceData, meta interface{}) error { + service := NewKafkaSaslUserService(meta.(*ve.SdkClient)) + return service.Dispatcher.Data(service, d, DataSourceVolcengineKafkaSaslUsers()) +} diff --git a/volcengine/kafka/kafka_sasl_user/resource_volcengine_kafka_sasl_user.go b/volcengine/kafka/kafka_sasl_user/resource_volcengine_kafka_sasl_user.go new file mode 100644 index 00000000..d5747308 --- /dev/null +++ b/volcengine/kafka/kafka_sasl_user/resource_volcengine_kafka_sasl_user.go @@ -0,0 +1,126 @@ +package kafka_sasl_user + +import ( + "fmt" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +/* + +Import +KafkaSaslUser can be imported using the kafka_id:username, e.g. 
+*/
+
+func ResourceVolcengineKafkaSaslUser() *schema.Resource {
+	resource := &schema.Resource{
+		Create: resourceVolcengineKafkaSaslUserCreate,
+		Read:   resourceVolcengineKafkaSaslUserRead,
+		Update: resourceVolcengineKafkaSaslUserUpdate,
+		Delete: resourceVolcengineKafkaSaslUserDelete,
+		Importer: &schema.ResourceImporter{
+			State: func(data *schema.ResourceData, i interface{}) ([]*schema.ResourceData, error) {
+				items := strings.Split(data.Id(), ":")
+				if len(items) != 2 {
+					return []*schema.ResourceData{data}, fmt.Errorf("import id must be split by ':'")
+				}
+				if err := data.Set("user_name", items[1]); err != nil {
+					return []*schema.ResourceData{data}, err
+				}
+				if err := data.Set("instance_id", items[0]); err != nil {
+					return []*schema.ResourceData{data}, err
+				}
+				return []*schema.ResourceData{data}, nil
+			},
+		},
+		Timeouts: &schema.ResourceTimeout{
+			Create: schema.DefaultTimeout(30 * time.Minute),
+			Update: schema.DefaultTimeout(30 * time.Minute),
+			Delete: schema.DefaultTimeout(30 * time.Minute),
+		},
+		Schema: map[string]*schema.Schema{
+			"instance_id": {
+				Type:        schema.TypeString,
+				Required:    true,
+				Description: "The id of instance.",
+				ForceNew:    true,
+			},
+			"user_name": {
+				Type:        schema.TypeString,
+				Required:    true,
+				Description: "The name of user.",
+				ForceNew:    true,
+			},
+			"description": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				Description: "The description of user.",
+				ForceNew:    true, // modification is not supported
+			},
+			"user_password": {
+				Type:        schema.TypeString,
+				Required:    true,
+				Description: "The password of user.",
+				Sensitive:   true,
+				ForceNew:    true,
+			},
+			"all_authority": {
+				Type:        schema.TypeBool,
+				Optional:    true,
+				Default:     true,
+				Description: "Whether this user has read and write permissions for all topics. Default is true.",
+			},
+			"password_type": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				ForceNew:    true,
+				Default:     "Plain",
+				Description: "The type of password. Valid values are `Scram` and `Plain`. 
Default is `Plain`.", + }, + }, + } + return resource +} + +func resourceVolcengineKafkaSaslUserCreate(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaSaslUserService(meta.(*ve.SdkClient)) + err = service.Dispatcher.Create(service, d, ResourceVolcengineKafkaSaslUser()) + if err != nil { + return fmt.Errorf("error on creating kafka_sasl_user %q, %s", d.Id(), err) + } + return resourceVolcengineKafkaSaslUserRead(d, meta) +} + +func resourceVolcengineKafkaSaslUserRead(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaSaslUserService(meta.(*ve.SdkClient)) + err = service.Dispatcher.Read(service, d, ResourceVolcengineKafkaSaslUser()) + if err != nil { + return fmt.Errorf("error on reading kafka_sasl_user %q, %s", d.Id(), err) + } + return err +} + +func resourceVolcengineKafkaSaslUserUpdate(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaSaslUserService(meta.(*ve.SdkClient)) + err = service.Dispatcher.Update(service, d, ResourceVolcengineKafkaSaslUser()) + if err != nil { + return fmt.Errorf("error on updating kafka_sasl_user %q, %s", d.Id(), err) + } + return resourceVolcengineKafkaSaslUserRead(d, meta) +} + +func resourceVolcengineKafkaSaslUserDelete(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaSaslUserService(meta.(*ve.SdkClient)) + err = service.Dispatcher.Delete(service, d, ResourceVolcengineKafkaSaslUser()) + if err != nil { + return fmt.Errorf("error on deleting kafka_sasl_user %q, %s", d.Id(), err) + } + return err +} diff --git a/volcengine/kafka/kafka_sasl_user/service_volcengine_kafka_sasl_user.go b/volcengine/kafka/kafka_sasl_user/service_volcengine_kafka_sasl_user.go new file mode 100644 index 00000000..bd8f7789 --- /dev/null +++ b/volcengine/kafka/kafka_sasl_user/service_volcengine_kafka_sasl_user.go @@ -0,0 +1,260 @@ +package kafka_sasl_user + +import ( + "encoding/json" + "errors" + "fmt" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" + "github.com/volcengine/terraform-provider-volcengine/logger" +) + +type VolcengineKafkaSaslUserService struct { + Client *ve.SdkClient + Dispatcher *ve.Dispatcher +} + +func NewKafkaSaslUserService(c *ve.SdkClient) *VolcengineKafkaSaslUserService { + return &VolcengineKafkaSaslUserService{ + Client: c, + Dispatcher: &ve.Dispatcher{}, + } +} + +func (s *VolcengineKafkaSaslUserService) GetClient() *ve.SdkClient { + return s.Client +} + +func (s *VolcengineKafkaSaslUserService) ReadResources(m map[string]interface{}) (data []interface{}, err error) { + var ( + resp *map[string]interface{} + results interface{} + ok bool + ) + return ve.WithPageNumberQuery(m, "PageSize", "PageNumber", 100, 1, func(condition map[string]interface{}) ([]interface{}, error) { + action := "DescribeUsers" + + bytes, _ := json.Marshal(condition) + logger.Debug(logger.ReqFormat, action, string(bytes)) + if condition == nil { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), nil) + if err != nil { + return data, err + } + } else { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &condition) + if err != nil { + return data, err + } + } + respBytes, _ := json.Marshal(resp) + logger.Debug(logger.RespFormat, action, condition, string(respBytes)) + results, err = ve.ObtainSdkValue("Result.UsersInfo", *resp) + if err != nil { + return data, err + } + if results 
== nil {
+			results = []interface{}{}
+		}
+		if data, ok = results.([]interface{}); !ok {
+			return data, errors.New("Result.UsersInfo is not Slice")
+		}
+		return data, err
+	})
+}
+
+func (s *VolcengineKafkaSaslUserService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) {
+	var (
+		results []interface{}
+		ok      bool
+	)
+	if id == "" {
+		id = s.ReadResourceId(resourceData.Id())
+	}
+	ids := strings.Split(id, ":")
+	req := map[string]interface{}{
+		"InstanceId": ids[0],
+		"UserName":   ids[1],
+	}
+	results, err = s.ReadResources(req)
+	if err != nil {
+		return data, err
+	}
+	for _, v := range results {
+		if _, ok = v.(map[string]interface{}); !ok {
+			return nil, errors.New("Value is not map ")
+		}
+		if v.(map[string]interface{})["UserName"] == ids[1] { // match by exact user name, since DescribeUsers matches fuzzily
+			data = v.(map[string]interface{})
+		}
+	}
+	if len(data) == 0 {
+		return data, fmt.Errorf("kafka_sasl_user %s not exist ", id)
+	}
+	return data, err
+}
+
+func (s *VolcengineKafkaSaslUserService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf {
+	return nil
+}
+
+func (s *VolcengineKafkaSaslUserService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback {
+	var callbacks []ve.Callback
+	// MLP customization: if the user already exists, delete it first and recreate it
+	instanceId := resourceData.Get("instance_id")
+	userName := resourceData.Get("user_name")
+	_, err := s.ReadResource(resourceData, fmt.Sprintf("%v:%v", instanceId, userName))
+	if err == nil {
+		// the user already exists, delete it first
+		deleteCallback := ve.Callback{
+			Call: ve.SdkCall{
+				Action:      "DeleteUser",
+				ConvertMode: ve.RequestConvertIgnore,
+				ContentType: ve.ContentTypeJson,
+				SdkParam: &map[string]interface{}{
+					"InstanceId": instanceId,
+					"UserName":   userName,
+				},
+				ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+					logger.Debug(logger.ReqFormat, call.Action, call.SdkParam)
+					return s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+				},
+			},
+		}
+		callbacks = append(callbacks, deleteCallback)
+	}
+	// end of MLP customization
+
+	callback := ve.Callback{
+		Call: ve.SdkCall{
+			Action:      "CreateUser",
+			ConvertMode: ve.RequestConvertAll,
+			ContentType: ve.ContentTypeJson,
+			Convert: map[string]ve.RequestConvert{
+				"all_authority": {
+					Convert: func(data *schema.ResourceData, i interface{}) interface{} {
+						v, ok := data.GetOkExists("all_authority")
+						if !ok {
+							return false
+						}
+						return v
+					},
+				},
+			},
+			ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) {
+				logger.Debug(logger.ReqFormat, call.Action, call.SdkParam)
+				resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam)
+				logger.Debug(logger.RespFormat, call.Action, resp, err)
+				return resp, err
+			},
+			AfterCall: func(d *schema.ResourceData, client *ve.SdkClient, resp *map[string]interface{}, call ve.SdkCall) error {
+				d.SetId(fmt.Sprintf("%v:%v", d.Get("instance_id"), d.Get("user_name")))
+				return nil
+			},
+		},
+	}
+	callbacks = append(callbacks, callback)
+	return callbacks
+}
+
+func (VolcengineKafkaSaslUserService) WithResourceResponseHandlers(d map[string]interface{}) []ve.ResourceResponseHandler {
+	handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) {
+		return d, nil, nil
+	}
+	return []ve.ResourceResponseHandler{handler}
+}
+
+func (s *VolcengineKafkaSaslUserService) ModifyResource(resourceData *schema.ResourceData, 
resource *schema.Resource) []ve.Callback { + res := make([]ve.Callback, 0) + ids := strings.Split(resourceData.Id(), ":") + if resourceData.HasChange("all_authority") { + res = append(res, ve.Callback{ + Call: ve.SdkCall{ + Action: "ModifyUserAuthority", + ConvertMode: ve.RequestConvertInConvert, + ContentType: ve.ContentTypeJson, + Convert: map[string]ve.RequestConvert{ + "all_authority": { + Convert: func(data *schema.ResourceData, i interface{}) interface{} { + v, ok := data.GetOkExists("all_authority") + if !ok { + return false + } + return v + }, + }, + }, + BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) { + (*call.SdkParam)["InstanceId"] = ids[0] + (*call.SdkParam)["UserName"] = ids[1] + return true, nil + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.ReqFormat, call.Action, call.SdkParam) + resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + logger.Debug(logger.RespFormat, call.Action, resp, err) + return resp, err + }, + }, + }) + } + return res +} + +func (s *VolcengineKafkaSaslUserService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback { + ids := strings.Split(resourceData.Id(), ":") + callback := ve.Callback{ + Call: ve.SdkCall{ + Action: "DeleteUser", + ConvertMode: ve.RequestConvertIgnore, + ContentType: ve.ContentTypeJson, + SdkParam: &map[string]interface{}{ + "InstanceId": ids[0], + "UserName": ids[1], + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.RespFormat, call.Action, call.SdkParam) + return s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + }, + AfterCall: func(d *schema.ResourceData, client *ve.SdkClient, resp *map[string]interface{}, call ve.SdkCall) error { + return ve.CheckResourceUtilRemoved(d, s.ReadResource, 5*time.Minute) + }, + }, + } + return []ve.Callback{callback} +} + +func (s *VolcengineKafkaSaslUserService) DatasourceResources(d *schema.ResourceData, resource *schema.Resource) ve.DataSourceInfo { + return ve.DataSourceInfo{ + NameField: "UserName", + IdField: "UserId", + CollectField: "users", + ExtraData: func(sourceData []interface{}) ([]interface{}, error) { + var next []interface{} + for _, i := range sourceData { + v := i.(map[string]interface{}) + v["UserId"] = fmt.Sprintf("%s:%s", d.Get("instance_id"), v["UserName"]) + next = append(next, i) + } + return next, nil + }, + } +} + +func (s *VolcengineKafkaSaslUserService) ReadResourceId(id string) string { + return id +} + +func getUniversalInfo(actionName string) ve.UniversalInfo { + return ve.UniversalInfo{ + ServiceName: "kafka", + Version: "2022-05-01", + HttpMethod: ve.POST, + ContentType: ve.ApplicationJSON, + Action: actionName, + } +} diff --git a/volcengine/kafka/kafka_topic/common_volcengine_kafka_topic.go b/volcengine/kafka/kafka_topic/common_volcengine_kafka_topic.go new file mode 100644 index 00000000..ef8e2100 --- /dev/null +++ b/volcengine/kafka/kafka_topic/common_volcengine_kafka_topic.go @@ -0,0 +1,36 @@ +package kafka_topic + +import ( + "bytes" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" +) + +var kafkaTopicImporter = func(data *schema.ResourceData, i interface{}) ([]*schema.ResourceData, error) { + items := strings.Split(data.Id(), ":") + 
if len(items) != 2 {
+		return []*schema.ResourceData{data}, fmt.Errorf("import id must be split by ':'")
+	}
+	if err := data.Set("instance_id", items[0]); err != nil {
+		return []*schema.ResourceData{data}, err
+	}
+	if err := data.Set("topic_name", items[1]); err != nil {
+		return []*schema.ResourceData{data}, err
+	}
+	return []*schema.ResourceData{data}, nil
+}
+
+// kafkaAccessPolicyHash hashes an access_policies set entry by its user name and policy.
+var kafkaAccessPolicyHash = func(v interface{}) int {
+	if v == nil {
+		return hashcode.String("")
+	}
+	m := v.(map[string]interface{})
+	var (
+		buf bytes.Buffer
+	)
+	buf.WriteString(fmt.Sprintf("%v#%v", m["user_name"], m["access_policy"]))
+	return hashcode.String(buf.String())
+}
diff --git a/volcengine/kafka/kafka_topic/data_source_volcengine_kafka_topics.go b/volcengine/kafka/kafka_topic/data_source_volcengine_kafka_topics.go
new file mode 100644
index 00000000..ffca4f7f
--- /dev/null
+++ b/volcengine/kafka/kafka_topic/data_source_volcengine_kafka_topics.go
@@ -0,0 +1,149 @@
+package kafka_topic
+
+import (
+	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
+	ve "github.com/volcengine/terraform-provider-volcengine/common"
+)
+
+func DataSourceVolcengineKafkaTopics() *schema.Resource {
+	return &schema.Resource{
+		Read: dataSourceVolcengineKafkaTopicsRead,
+		Schema: map[string]*schema.Schema{
+			"instance_id": {
+				Type:        schema.TypeString,
+				Required:    true,
+				Description: "The id of kafka instance.",
+			},
+			"topic_name": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				Description: "The name of kafka topic. This field supports fuzzy query.",
+			},
+			"partition_number": {
+				Type:        schema.TypeInt,
+				Optional:    true,
+				Description: "The number of partition in kafka topic.",
+			},
+			"replica_number": {
+				Type:        schema.TypeInt,
+				Optional:    true,
+				Description: "The number of replica in kafka topic.",
+			},
+			"user_name": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				Description: "When a user name is specified, only the access policy of the specified user for this Topic will be returned.",
+			},
+			"name_regex": {
+				Type:         schema.TypeString,
+				Optional:     true,
+				ValidateFunc: validation.StringIsValidRegExp,
+				Description:  "A Name Regex of kafka topic.",
+			},
+			"output_file": {
+				Type:        schema.TypeString,
+				Optional:    true,
+				Description: "File name where to save data source results.",
+			},
+			"total_count": {
+				Type:        schema.TypeInt,
+				Computed:    true,
+				Description: "The total count of query.",
+			},
+			"topics": {
+				Description: "The collection of query.",
+				Type:        schema.TypeList,
+				Computed:    true,
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"topic_name": {
+							Type:        schema.TypeString,
+							Computed:    true,
+							Description: "The name of the kafka topic.",
+						},
+						"description": {
+							Type:        schema.TypeString,
+							Computed:    true,
+							Description: "The description of the kafka topic.",
+						},
+						"create_time": {
+							Type:        schema.TypeString,
+							Computed:    true,
+							Description: "The create time of the kafka topic.",
+						},
+						"partition_number": {
+							Type:        schema.TypeInt,
+							Computed:    true,
+							Description: "The number of partition in the kafka topic.",
+						},
+						"replica_number": {
+							Type:        schema.TypeInt,
+							Computed:    true,
+							Description: "The number of replica in the kafka topic.",
+						},
+						"status": {
+							Type:        schema.TypeString,
+							Computed:    true,
+							Description: "The status of the kafka topic.",
+						},
map[string]*schema.Schema{ + "min_insync_replica_number": { + Type: schema.TypeInt, + Computed: true, + Description: "The min number of sync replica.", + }, + "message_max_byte": { + Type: schema.TypeInt, + Computed: true, + Description: "The max byte of message.", + }, + "log_retention_hours": { + Type: schema.TypeInt, + Computed: true, + Description: "The retention hours of log.", + }, + }, + }, + }, + "all_authority": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether the kafka topic is configured to be accessible by all users.", + }, + "access_policies": { + Type: schema.TypeList, + Computed: true, + Description: "The access policies info of the kafka topic.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "user_name": { + Type: schema.TypeString, + Computed: true, + Description: "The name of SASL user.", + }, + "access_policy": { + Type: schema.TypeString, + Computed: true, + Description: "The access policy of SASL user.", + }, + }, + }, + }, + }, + }, + }, + }, + } +} + +func dataSourceVolcengineKafkaTopicsRead(d *schema.ResourceData, meta interface{}) error { + service := NewKafkaTopicService(meta.(*ve.SdkClient)) + return ve.DefaultDispatcher().Data(service, d, DataSourceVolcengineKafkaTopics()) +} diff --git a/volcengine/kafka/kafka_topic/resource_volcengine_kafka_topic.go b/volcengine/kafka/kafka_topic/resource_volcengine_kafka_topic.go new file mode 100644 index 00000000..88f8c37d --- /dev/null +++ b/volcengine/kafka/kafka_topic/resource_volcengine_kafka_topic.go @@ -0,0 +1,168 @@ +package kafka_topic + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +/* + +Import +KafkaTopic can be imported using the instance_id:topic_name, e.g. +``` +$ terraform import volcengine_kafka_topic.default kafka-cnoeeapetf4s****:topic +``` + +*/ + +func ResourceVolcengineKafkaTopic() *schema.Resource { + resource := &schema.Resource{ + Create: resourceVolcengineKafkaTopicCreate, + Read: resourceVolcengineKafkaTopicRead, + Update: resourceVolcengineKafkaTopicUpdate, + Delete: resourceVolcengineKafkaTopicDelete, + Importer: &schema.ResourceImporter{ + State: kafkaTopicImporter, + }, + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The instance id of the kafka topic.", + }, + "topic_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The name of the kafka topic.", + }, + "description": { + Type: schema.TypeString, + Optional: true, + Description: "The description of the kafka topic.", + }, + "partition_number": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 300), + Description: "The number of partition in kafka topic. Valid values: 1-300. This field can only be adjusted up but not down.", + }, + "replica_number": { + Type: schema.TypeInt, + Optional: true, + ForceNew: true, // modification is not supported + Default: 3, + ValidateFunc: validation.IntInSlice([]int{2, 3}), + Description: "The number of replica in kafka topic. The value can be 2 or 3. 
Default is 3.", + }, + "parameters": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Description: "The parameters of the kafka topic.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "min_insync_replica_number": { + Type: schema.TypeInt, + Optional: true, + Computed: true, + Description: "The min number of sync replica. The default value is the replica number minus 1.", + }, + "message_max_byte": { + Type: schema.TypeInt, + Optional: true, + Default: 10, + ValidateFunc: validation.IntBetween(1, 12), + Description: "The max byte of message. Unit: MB. Valid values: 1-12. Default is 10.", + }, + "log_retention_hours": { + Type: schema.TypeInt, + Optional: true, + Default: 72, + ValidateFunc: validation.IntBetween(0, 2160), + Description: "The retention hours of log. Unit: hour. Valid values: 0-2160. Default is 72.", + }, + }, + }, + }, + "all_authority": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: "Whether the kafka topic is configured to be accessible by all users. Default: true.", + }, + "access_policies": { + Type: schema.TypeSet, + Optional: true, + Set: kafkaAccessPolicyHash, + Description: "The access policies info of the kafka topic. This field only valid when the value of the AllAuthority is false.", + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + return d.Get("all_authority").(bool) + }, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "user_name": { + Type: schema.TypeString, + Required: true, + Description: "The name of SASL user.", + }, + "access_policy": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{"PubSub", "Pub", "Sub"}, false), + Description: "The access policy of SASL user. 
Valid values: `PubSub`, `Pub`, `Sub`.", + }, + }, + }, + }, + }, + } + return resource +} + +func resourceVolcengineKafkaTopicCreate(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaTopicService(meta.(*ve.SdkClient)) + err = ve.DefaultDispatcher().Create(service, d, ResourceVolcengineKafkaTopic()) + if err != nil { + return fmt.Errorf("error on creating kafka_topic %q, %s", d.Id(), err) + } + return resourceVolcengineKafkaTopicRead(d, meta) +} + +func resourceVolcengineKafkaTopicRead(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaTopicService(meta.(*ve.SdkClient)) + err = ve.DefaultDispatcher().Read(service, d, ResourceVolcengineKafkaTopic()) + if err != nil { + return fmt.Errorf("error on reading kafka_topic %q, %s", d.Id(), err) + } + return err +} + +func resourceVolcengineKafkaTopicUpdate(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaTopicService(meta.(*ve.SdkClient)) + err = ve.DefaultDispatcher().Update(service, d, ResourceVolcengineKafkaTopic()) + if err != nil { + return fmt.Errorf("error on updating kafka_topic %q, %s", d.Id(), err) + } + return resourceVolcengineKafkaTopicRead(d, meta) +} + +func resourceVolcengineKafkaTopicDelete(d *schema.ResourceData, meta interface{}) (err error) { + service := NewKafkaTopicService(meta.(*ve.SdkClient)) + err = ve.DefaultDispatcher().Delete(service, d, ResourceVolcengineKafkaTopic()) + if err != nil { + return fmt.Errorf("error on deleting kafka_topic %q, %s", d.Id(), err) + } + return err +} diff --git a/volcengine/kafka/kafka_topic/service_volcengine_kafka_topic.go b/volcengine/kafka/kafka_topic/service_volcengine_kafka_topic.go new file mode 100644 index 00000000..05f9b7fa --- /dev/null +++ b/volcengine/kafka/kafka_topic/service_volcengine_kafka_topic.go @@ -0,0 +1,563 @@ +package kafka_topic + +import ( + "encoding/json" + "errors" + "fmt" + "strconv" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" + "github.com/volcengine/terraform-provider-volcengine/logger" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_instance" +) + +type VolcengineKafkaTopicService struct { + Client *ve.SdkClient +} + +func NewKafkaTopicService(c *ve.SdkClient) *VolcengineKafkaTopicService { + return &VolcengineKafkaTopicService{ + Client: c, + } +} + +func (s *VolcengineKafkaTopicService) GetClient() *ve.SdkClient { + return s.Client +} + +func (s *VolcengineKafkaTopicService) ReadResources(m map[string]interface{}) (data []interface{}, err error) { + var ( + resp *map[string]interface{} + results interface{} + ok bool + ) + return ve.WithPageNumberQuery(m, "PageSize", "PageNumber", 20, 1, func(condition map[string]interface{}) ([]interface{}, error) { + action := "DescribeTopics" + + bytes, _ := json.Marshal(condition) + logger.Debug(logger.ReqFormat, action, string(bytes)) + if condition == nil { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), nil) + if err != nil { + return data, err + } + } else { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &condition) + if err != nil { + return data, err + } + } + respBytes, _ := json.Marshal(resp) + logger.Debug(logger.RespFormat, action, condition, string(respBytes)) + results, err = ve.ObtainSdkValue("Result.TopicsInfo", *resp) + if err != nil { + return data, err + } + if results == nil { + 
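// treat a missing Result.TopicsInfo as an empty result set + 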
results = []interface{}{} + } + if data, ok = results.([]interface{}); !ok { + return data, errors.New("Result.TopicsInfo is not Slice") + } + + for _, ele := range data { + topic, ok := ele.(map[string]interface{}) + if !ok { + return data, fmt.Errorf(" Topic is not Map ") + } + // query the topic parameters + action := "DescribeTopicParameters" + req := map[string]interface{}{ + "InstanceId": m["InstanceId"], + "TopicName": topic["TopicName"], + } + logger.Debug(logger.ReqFormat, action, req) + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &req) + if err != nil { + return data, err + } + logger.Debug(logger.RespFormat, action, req, *resp) + paramStr, err := ve.ObtainSdkValue("Result.Parameters", *resp) + if err != nil { + return data, err + } + param := make(map[string]interface{}) + err = json.Unmarshal([]byte(paramStr.(string)), &param) + if err != nil { + return data, fmt.Errorf(" json Unmarshal Parameters error: %v", err) + } + logger.DebugInfo(" Unmarshal Parameters", param) + param["MinInsyncReplicaNumber"], _ = strconv.Atoi(param["MinInsyncReplicaNumber"].(string)) + param["MessageMaxByte"], _ = strconv.Atoi(param["MessageMaxByte"].(string)) + param["LogRetentionHours"], _ = strconv.Atoi(param["LogRetentionHours"].(string)) + topic["Parameters"] = param + + // query the topic access policies + action = "DescribeTopicAccessPolicies" + con := map[string]interface{}{ + "InstanceId": m["InstanceId"], + "TopicName": topic["TopicName"], + } + if userName, exist := m["UserName"]; exist && (len(userName.(string)) > 0) { + con["UserName"] = userName + } + logger.Debug(logger.ReqFormat, action, con) + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &con) + if err != nil { + return data, err + } + logger.Debug(logger.RespFormat, action, con, *resp) + + accessPolicies, err := ve.ObtainSdkValue("Result", *resp) + if err != nil { + return data, err + } + apMap, ok := accessPolicies.(map[string]interface{}) + if !ok { + return data, fmt.Errorf(" Result is not Map ") + } + for k, v := range apMap { + topic[k] = v + } + } + + return data, err + }) +} + +func (s *VolcengineKafkaTopicService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) { + var ( + results []interface{} + ok bool + ) + if id == "" { + id = s.ReadResourceId(resourceData.Id()) + } + ids := strings.Split(id, ":") + if len(ids) != 2 { + return data, fmt.Errorf(" the id format must be 'instance_id:topic_name'") + } + req := map[string]interface{}{ + "InstanceId": ids[0], + "TopicName": ids[1], + } + results, err = s.ReadResources(req) + if err != nil { + return data, err + } + for _, v := range results { + topicMap := make(map[string]interface{}) + if topicMap, ok = v.(map[string]interface{}); !ok { + return nil, errors.New("Value is not map ") + } + if topicMap["TopicName"].(string) == ids[1] { + data = topicMap + break + } + } + if len(data) == 0 { + return data, fmt.Errorf("kafka_topic %s not exist ", id) + } + return data, err +} + +func (s *VolcengineKafkaTopicService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf { + return &resource.StateChangeConf{ + Pending: []string{}, + Delay: 1 * time.Second, + MinTimeout: 1 * time.Second, + Target: target, + Timeout: timeout, + Refresh: func() (result interface{}, state string, err error) { + var ( + d map[string]interface{} + status interface{} + failStates []string + ) + failStates = append(failStates, "Fault") + d, err = s.ReadResource(resourceData, 
id) + if err != nil { + return nil, "", err + } + status, err = ve.ObtainSdkValue("Status", d) + if err != nil { + return nil, "", err + } + for _, v := range failStates { + if v == status.(string) { + return nil, "", fmt.Errorf("kafka_topic status error, status: %s", status.(string)) + } + } + return d, status.(string), err + }, + } +} + +func (VolcengineKafkaTopicService) WithResourceResponseHandlers(d map[string]interface{}) []ve.ResourceResponseHandler { + handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) { + return d, nil, nil + } + return []ve.ResourceResponseHandler{handler} +} + +func (s *VolcengineKafkaTopicService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + callback := ve.Callback{ + Call: ve.SdkCall{ + Action: "CreateTopic", + ContentType: ve.ContentTypeJson, + ConvertMode: ve.RequestConvertAll, + Convert: map[string]ve.RequestConvert{ + "all_authority": { + TargetField: "AllAuthority", + ForceGet: true, + }, + "parameters": { + TargetField: "Parameters", + ConvertType: ve.ConvertJsonObject, + NextLevelConvert: map[string]ve.RequestConvert{ + "min_insync_replica_number": { + TargetField: "MinInsyncReplicaNumber", + }, + "message_max_byte": { + TargetField: "MessageMaxByte", + }, + "log_retention_hours": { + TargetField: "LogRetentionHours", + }, + }, + }, + "access_policies": { + TargetField: "AccessPolicies", + ConvertType: ve.ConvertJsonObjectArray, + NextLevelConvert: map[string]ve.RequestConvert{ + "user_name": { + TargetField: "UserName", + }, + "access_policy": { + TargetField: "AccessPolicy", + }, + }, + }, + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + // convert Parameters: stringify the values and marshal the map into a JSON string + if param, exist := (*call.SdkParam)["Parameters"]; exist { + paramMap, ok := param.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf(" Parameters is not map ") + } + for key, value := range paramMap { + paramMap[key] = strconv.Itoa(value.(int)) + } + paramBytes, err := json.Marshal(paramMap) + if err != nil { + return nil, fmt.Errorf(" Marshal Parameters error: %v", err) + } + logger.DebugInfo("Marshal Parameters", string(paramBytes)) + (*call.SdkParam)["Parameters"] = string(paramBytes) + } + logger.Debug(logger.ReqFormat, call.Action, call.SdkParam) + resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + logger.Debug(logger.RespFormat, call.Action, resp, err) + return resp, err + }, + AfterCall: func(d *schema.ResourceData, client *ve.SdkClient, resp *map[string]interface{}, call ve.SdkCall) error { + d.SetId(fmt.Sprintf("%v:%v", d.Get("instance_id"), d.Get("topic_name"))) + return nil + }, + LockId: func(d *schema.ResourceData) string { + return d.Get("instance_id").(string) + }, + Refresh: &ve.StateRefresh{ + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutCreate), + }, + ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{ + kafka_instance.NewKafkaInstanceService(s.Client): { + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutCreate), + ResourceId: resourceData.Get("instance_id").(string), + }, + }, + }, + } + return []ve.Callback{callback} +} + +func (s *VolcengineKafkaTopicService) ModifyResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + var callbacks []ve.Callback + ids := strings.Split(resourceData.Id(), ":") + + if resourceData.HasChange("description") { + topicCallback := ve.Callback{ + Call: 
ve.SdkCall{ + Action: "ModifyTopicAttributes", + ConvertMode: ve.RequestConvertInConvert, + ContentType: ve.ContentTypeJson, + Convert: map[string]ve.RequestConvert{ + "description": { + TargetField: "Description", + }, + }, + BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) { + if len(*call.SdkParam) > 0 { + (*call.SdkParam)["InstanceId"] = ids[0] + (*call.SdkParam)["TopicName"] = ids[1] + return true, nil + } + return false, nil + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.ReqFormat, call.Action, call.SdkParam) + resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + logger.Debug(logger.RespFormat, call.Action, resp, err) + return resp, err + }, + LockId: func(d *schema.ResourceData) string { + return d.Get("instance_id").(string) + }, + Refresh: &ve.StateRefresh{ + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutUpdate), + }, + ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{ + kafka_instance.NewKafkaInstanceService(s.Client): { + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutUpdate), + ResourceId: resourceData.Get("instance_id").(string), + }, + }, + }, + } + callbacks = append(callbacks, topicCallback) + } + + if resourceData.HasChanges("partition_number", "parameters") { + paramCallback := ve.Callback{ + Call: ve.SdkCall{ + Action: "ModifyTopicParameters", + ConvertMode: ve.RequestConvertInConvert, + ContentType: ve.ContentTypeJson, + Convert: map[string]ve.RequestConvert{ + "partition_number": { + TargetField: "PartitionNumber", + }, + "parameters": { + TargetField: "Parameters", + ConvertType: ve.ConvertJsonObject, + ForceGet: true, + NextLevelConvert: map[string]ve.RequestConvert{ + "min_insync_replica_number": { + TargetField: "MinInsyncReplicaNumber", + }, + "message_max_byte": { + TargetField: "MessageMaxByte", + }, + "log_retention_hours": { + TargetField: "LogRetentionHours", + }, + }, + }, + }, + BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) { + if len(*call.SdkParam) > 0 { + (*call.SdkParam)["InstanceId"] = ids[0] + (*call.SdkParam)["TopicName"] = ids[1] + return true, nil + } + return false, nil + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + // convert Parameters: stringify the values and marshal the map into a JSON string + if param, exist := (*call.SdkParam)["Parameters"]; exist { + paramMap, ok := param.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf(" Parameters is not map ") + } + for key, value := range paramMap { + paramMap[key] = strconv.Itoa(value.(int)) + } + paramBytes, err := json.Marshal(paramMap) + if err != nil { + return nil, fmt.Errorf(" Marshal Parameters error: %v", err) + } + logger.DebugInfo("Marshal Parameters", string(paramBytes)) + (*call.SdkParam)["Parameters"] = string(paramBytes) + } + logger.Debug(logger.ReqFormat, call.Action, call.SdkParam) + resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + logger.Debug(logger.RespFormat, call.Action, resp, err) + return resp, err + }, + LockId: func(d *schema.ResourceData) string { + return d.Get("instance_id").(string) + }, + Refresh: &ve.StateRefresh{ + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutUpdate), + }, + ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{ + kafka_instance.NewKafkaInstanceService(s.Client): { + 
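// also wait for the parent kafka instance to return to Running before the update is considered complete + 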
Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutUpdate), + ResourceId: resourceData.Get("instance_id").(string), + }, + }, + }, + } + callbacks = append(callbacks, paramCallback) + } + + if resourceData.HasChanges("all_authority", "access_policies") { + added, removed, _, _ := ve.GetSetDifference("access_policies", resourceData, kafkaAccessPolicyHash, false) + + callbacks = append(callbacks, ve.Callback{ + Call: ve.SdkCall{ + Action: "ModifyTopicAccessPolicies", + ConvertMode: ve.RequestConvertInConvert, + ContentType: ve.ContentTypeJson, + Convert: map[string]ve.RequestConvert{ + "all_authority": { + TargetField: "AllAuthority", + ForceGet: true, + }, + }, + BeforeCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (bool, error) { + (*call.SdkParam)["InstanceId"] = ids[0] + (*call.SdkParam)["TopicName"] = ids[1] + if (*call.SdkParam)["AllAuthority"].(bool) { + return true, nil + } + + (*call.SdkParam)["AccessPolicies"] = make([]interface{}, 0) + (*call.SdkParam)["DeletePolicies"] = make([]string, 0) + userNames := make(map[string]bool) + if added != nil && len(added.List()) > 0 { + for _, ele := range added.List() { + (*call.SdkParam)["AccessPolicies"] = append((*call.SdkParam)["AccessPolicies"].([]interface{}), + map[string]interface{}{ + "UserName": ele.(map[string]interface{})["user_name"], + "AccessPolicy": ele.(map[string]interface{})["access_policy"], + }) + userNames[ele.(map[string]interface{})["user_name"].(string)] = true + } + } + if removed != nil && len(removed.List()) > 0 { + for _, ele := range removed.List() { + if _, exist := userNames[ele.(map[string]interface{})["user_name"].(string)]; exist { + continue + } + (*call.SdkParam)["DeletePolicies"] = append((*call.SdkParam)["DeletePolicies"].([]string), ele.(map[string]interface{})["user_name"].(string)) + } + } + return true, nil + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.ReqFormat, call.Action, call.SdkParam) + resp, err := s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + logger.Debug(logger.RespFormat, call.Action, resp, err) + return resp, err + }, + LockId: func(d *schema.ResourceData) string { + return d.Get("instance_id").(string) + }, + Refresh: &ve.StateRefresh{ + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutUpdate), + }, + ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{ + kafka_instance.NewKafkaInstanceService(s.Client): { + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutUpdate), + ResourceId: resourceData.Get("instance_id").(string), + }, + }, + }, + }) + } + return callbacks +} + +func (s *VolcengineKafkaTopicService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback { + ids := strings.Split(resourceData.Id(), ":") + callback := ve.Callback{ + Call: ve.SdkCall{ + Action: "DeleteTopic", + ConvertMode: ve.RequestConvertIgnore, + ContentType: ve.ContentTypeJson, + SdkParam: &map[string]interface{}{ + "InstanceId": ids[0], + "TopicName": ids[1], + }, + ExecuteCall: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall) (*map[string]interface{}, error) { + logger.Debug(logger.RespFormat, call.Action, call.SdkParam) + return s.Client.UniversalClient.DoCall(getUniversalInfo(call.Action), call.SdkParam) + }, + AfterCall: func(d *schema.ResourceData, client *ve.SdkClient, resp *map[string]interface{}, call ve.SdkCall) error { + 
return ve.CheckResourceUtilRemoved(d, s.ReadResource, 5*time.Minute) + }, + CallError: func(d *schema.ResourceData, client *ve.SdkClient, call ve.SdkCall, baseErr error) error { + // retry when an error occurs + return resource.Retry(15*time.Minute, func() *resource.RetryError { + _, callErr := s.ReadResource(d, "") + if callErr != nil { + if ve.ResourceNotFoundError(callErr) { + return nil + } else { + return resource.NonRetryableError(fmt.Errorf("error on reading kafka topic on delete %q, %w", d.Id(), callErr)) + } + } + _, callErr = call.ExecuteCall(d, client, call) + if callErr == nil { + return nil + } + return resource.RetryableError(callErr) + }) + }, + LockId: func(d *schema.ResourceData) string { + return d.Get("instance_id").(string) + }, + ExtraRefresh: map[ve.ResourceService]*ve.StateRefresh{ + kafka_instance.NewKafkaInstanceService(s.Client): { + Target: []string{"Running"}, + Timeout: resourceData.Timeout(schema.TimeoutDelete), + ResourceId: resourceData.Get("instance_id").(string), + }, + }, + }, + } + return []ve.Callback{callback} +} + +func (s *VolcengineKafkaTopicService) DatasourceResources(*schema.ResourceData, *schema.Resource) ve.DataSourceInfo { + return ve.DataSourceInfo{ + IdField: "TopicId", + NameField: "TopicName", + CollectField: "topics", + ExtraData: func(i []interface{}) ([]interface{}, error) { + for index, ele := range i { + element := ele.(map[string]interface{}) + i[index].(map[string]interface{})["TopicId"] = fmt.Sprintf("%v-%v", element["InstanceId"], element["TopicName"]) + } + return i, nil + }, + } +} + +func (s *VolcengineKafkaTopicService) ReadResourceId(id string) string { + return id +} + +func getUniversalInfo(actionName string) ve.UniversalInfo { + return ve.UniversalInfo{ + ServiceName: "kafka", + Version: "2022-05-01", + HttpMethod: ve.POST, + ContentType: ve.ApplicationJSON, + Action: actionName, + } +} diff --git a/volcengine/kafka/kafka_topic_partition/data_source_volcengine_kafka_topic_partitions.go b/volcengine/kafka/kafka_topic_partition/data_source_volcengine_kafka_topic_partitions.go new file mode 100644 index 00000000..7b929b89 --- /dev/null +++ b/volcengine/kafka/kafka_topic_partition/data_source_volcengine_kafka_topic_partitions.go @@ -0,0 +1,106 @@ +package kafka_topic_partition + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +func DataSourceVolcengineKafkaTopicPartitions() *schema.Resource { + return &schema.Resource{ + Read: dataSourceVolcengineKafkaTopicPartitionsRead, + Schema: map[string]*schema.Schema{ + "instance_id": { + Type: schema.TypeString, + Required: true, + Description: "The id of kafka instance.", + }, + "topic_name": { + Type: schema.TypeString, + Required: true, + Description: "The name of kafka topic.", + }, + "under_insync_only": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Whether to only query the list of partitions that have out-of-sync replicas. Default is false.", + }, + "partition_ids": { + Type: schema.TypeSet, + Optional: true, + Set: schema.HashInt, + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + Description: "The index number of partition.", + }, + "output_file": { + Type: schema.TypeString, + Optional: true, + Description: "File name where to save data source results.", + }, + "total_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The total count of query.", + }, + "partitions": { + Description: "The collection of query.", + Type: schema.TypeList, + 
Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "partition_id": { + Type: schema.TypeInt, + Computed: true, + Description: "The index number of partition.", + }, + "leader": { + Type: schema.TypeInt, + Computed: true, + Description: "The leader info of partition.", + }, + "start_offset": { + Type: schema.TypeInt, + Computed: true, + Description: "The start offset of partition leader.", + }, + "end_offset": { + Type: schema.TypeInt, + Computed: true, + Description: "The end offset of partition leader.", + }, + "message_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The count of message.", + }, + "replicas": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeInt}, + Computed: true, + Description: "The replica info.", + }, + "insync_replicas": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeInt}, + Computed: true, + Description: "The insync replica info.", + }, + "under_insync_replicas": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeInt}, + Computed: true, + Description: "The under insync replica info.", + }, + }, + }, + }, + }, + } +} + +func dataSourceVolcengineKafkaTopicPartitionsRead(d *schema.ResourceData, meta interface{}) error { + service := NewKafkaTopicPartitionService(meta.(*ve.SdkClient)) + return ve.DefaultDispatcher().Data(service, d, DataSourceVolcengineKafkaTopicPartitions()) +} diff --git a/volcengine/kafka/kafka_topic_partition/service_volcengine_kafka_topic_partition.go b/volcengine/kafka/kafka_topic_partition/service_volcengine_kafka_topic_partition.go new file mode 100644 index 00000000..bd05d158 --- /dev/null +++ b/volcengine/kafka/kafka_topic_partition/service_volcengine_kafka_topic_partition.go @@ -0,0 +1,118 @@ +package kafka_topic_partition + +import ( + "encoding/json" + "errors" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" + "github.com/volcengine/terraform-provider-volcengine/logger" +) + +type VolcengineKafkaTopicPartitionService struct { + Client *ve.SdkClient +} + +func NewKafkaTopicPartitionService(c *ve.SdkClient) *VolcengineKafkaTopicPartitionService { + return &VolcengineKafkaTopicPartitionService{ + Client: c, + } +} + +func (s *VolcengineKafkaTopicPartitionService) GetClient() *ve.SdkClient { + return s.Client +} + +func (s *VolcengineKafkaTopicPartitionService) ReadResources(m map[string]interface{}) (data []interface{}, err error) { + var ( + resp *map[string]interface{} + results interface{} + ok bool + ) + return ve.WithPageNumberQuery(m, "PageSize", "PageNumber", 100, 1, func(condition map[string]interface{}) ([]interface{}, error) { + action := "DescribeTopicPartitions" + + bytes, _ := json.Marshal(condition) + logger.Debug(logger.ReqFormat, action, string(bytes)) + if condition == nil { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), nil) + if err != nil { + return data, err + } + } else { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &condition) + if err != nil { + return data, err + } + } + respBytes, _ := json.Marshal(resp) + logger.Debug(logger.RespFormat, action, condition, string(respBytes)) + results, err = ve.ObtainSdkValue("Result.PartitionsInfo", *resp) + if err != nil { + return data, err + } + if results == nil { + results = []interface{}{} + } + if data, ok = results.([]interface{}); !ok { + return data, 
errors.New("Result.PartitionsInfo is not Slice") + } + return data, err + }) +} + +func (s *VolcengineKafkaTopicPartitionService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) { + return data, nil +} + +func (s *VolcengineKafkaTopicPartitionService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf { + return nil +} + +func (s *VolcengineKafkaTopicPartitionService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (VolcengineKafkaTopicPartitionService) WithResourceResponseHandlers(d map[string]interface{}) []ve.ResourceResponseHandler { + handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) { + return d, nil, nil + } + return []ve.ResourceResponseHandler{handler} +} + +func (s *VolcengineKafkaTopicPartitionService) ModifyResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineKafkaTopicPartitionService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineKafkaTopicPartitionService) DatasourceResources(*schema.ResourceData, *schema.Resource) ve.DataSourceInfo { + return ve.DataSourceInfo{ + RequestConverts: map[string]ve.RequestConvert{ + "partition_ids": { + TargetField: "PartitionIds", + ConvertType: ve.ConvertJsonArray, + }, + }, + CollectField: "partitions", + ContentType: ve.ContentTypeJson, + } +} + +func (s *VolcengineKafkaTopicPartitionService) ReadResourceId(id string) string { + return id +} + +func getUniversalInfo(actionName string) ve.UniversalInfo { + return ve.UniversalInfo{ + ServiceName: "kafka", + Version: "2022-05-01", + HttpMethod: ve.POST, + ContentType: ve.ApplicationJSON, + Action: actionName, + } +} diff --git a/volcengine/kafka/kafka_zone/data_source_volcengine_kafka_zones.go b/volcengine/kafka/kafka_zone/data_source_volcengine_kafka_zones.go new file mode 100644 index 00000000..337da99a --- /dev/null +++ b/volcengine/kafka/kafka_zone/data_source_volcengine_kafka_zones.go @@ -0,0 +1,68 @@ +package kafka_zone + +import ( + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" +) + +func DataSourceVolcengineZones() *schema.Resource { + return &schema.Resource{ + Read: dataSourceVolcengineZonesRead, + Schema: map[string]*schema.Schema{ + "region_id": { + Type: schema.TypeString, + Required: true, + Description: "The Id of Region.", + }, + "output_file": { + Type: schema.TypeString, + Optional: true, + Description: "File name where to save data source results.", + }, + "total_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The total count of zone query.", + }, + "zones": { + Description: "The collection of zone query.", + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of the zone.", + }, + "zone_id": { + Type: schema.TypeString, + Computed: true, + Description: "The id of the zone.", + }, + "zone_name": { + Type: schema.TypeString, + Computed: true, + Description: "The name of the zone.", + }, + "status": { + Type: schema.TypeString, + Computed: true, + Description: "The status of the zone.", + }, + "description": { + Type: schema.TypeString, + 
Computed: true, + Description: "The description of the zone.", + }, + }, + }, + }, + }, + } +} + +func dataSourceVolcengineZonesRead(d *schema.ResourceData, meta interface{}) error { + zoneService := NewZoneService(meta.(*ve.SdkClient)) + return ve.DefaultDispatcher().Data(zoneService, d, DataSourceVolcengineZones()) +} diff --git a/volcengine/kafka/kafka_zone/service_volcengine_kafka_zone.go b/volcengine/kafka/kafka_zone/service_volcengine_kafka_zone.go new file mode 100644 index 00000000..0b11c576 --- /dev/null +++ b/volcengine/kafka/kafka_zone/service_volcengine_kafka_zone.go @@ -0,0 +1,115 @@ +package kafka_zone + +import ( + "errors" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + ve "github.com/volcengine/terraform-provider-volcengine/common" + "github.com/volcengine/terraform-provider-volcengine/logger" +) + +type VolcengineZoneService struct { + Client *ve.SdkClient +} + +func NewZoneService(c *ve.SdkClient) *VolcengineZoneService { + return &VolcengineZoneService{ + Client: c, + } +} + +func (s *VolcengineZoneService) GetClient() *ve.SdkClient { + return s.Client +} + +func getUniversalInfo(actionName string) ve.UniversalInfo { + return ve.UniversalInfo{ + ServiceName: "kafka", + Version: "2022-05-01", + HttpMethod: ve.POST, + ContentType: ve.ApplicationJSON, + Action: actionName, + } +} + +func (s *VolcengineZoneService) ReadResources(condition map[string]interface{}) ([]interface{}, error) { + var ( + resp *map[string]interface{} + results interface{} + ok bool + err error + data []interface{} + ) + action := "DescribeAvailabilityZones" + logger.Debug(logger.ReqFormat, action, condition) + if condition == nil { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), nil) + } else { + resp, err = s.Client.UniversalClient.DoCall(getUniversalInfo(action), &condition) + } + if err != nil { + return nil, err + } + logger.Debug(logger.RespFormat, action, condition, *resp) + + results, err = ve.ObtainSdkValue("Result.Zones", *resp) + if err != nil { + return nil, err + } + if results == nil { + results = make([]interface{}, 0) + } + + if data, ok = results.([]interface{}); !ok { + return nil, errors.New("Result.Zones is not Slice") + } + + return data, nil +} + +func (s *VolcengineZoneService) ReadResource(resourceData *schema.ResourceData, id string) (data map[string]interface{}, err error) { + return nil, nil +} + +func (s *VolcengineZoneService) RefreshResourceState(resourceData *schema.ResourceData, target []string, timeout time.Duration, id string) *resource.StateChangeConf { + return nil +} + +func (s *VolcengineZoneService) WithResourceResponseHandlers(zone map[string]interface{}) []ve.ResourceResponseHandler { + handler := func() (map[string]interface{}, map[string]ve.ResponseConvert, error) { + return zone, nil, nil + } + return []ve.ResourceResponseHandler{handler} +} + +func (s *VolcengineZoneService) CreateResource(resourceData *schema.ResourceData, resource *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineZoneService) ModifyResource(resourceData *schema.ResourceData, resource *schema.Resource) (callbacks []ve.Callback) { + return callbacks +} + +func (s *VolcengineZoneService) RemoveResource(resourceData *schema.ResourceData, r *schema.Resource) []ve.Callback { + return []ve.Callback{} +} + +func (s *VolcengineZoneService) DatasourceResources(data *schema.ResourceData, resource *schema.Resource) ve.DataSourceInfo { + return 
ve.DataSourceInfo{ + NameField: "ZoneName", + IdField: "ZoneId", + CollectField: "zones", + ResponseConverts: map[string]ve.ResponseConvert{ + "ZoneId": { + TargetField: "id", + KeepDefault: true, + }, + }, + } +} + +func (s *VolcengineZoneService) ReadResourceId(id string) string { + return id +} diff --git a/volcengine/provider.go b/volcengine/provider.go index 2edc51c0..8b15ef8a 100644 --- a/volcengine/provider.go +++ b/volcengine/provider.go @@ -10,6 +10,17 @@ import ( "strings" "time" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_consumed_partition" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_consumed_topic" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_group" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_instance" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_public_address" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_region" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_sasl_user" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_topic" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_topic_partition" + "github.com/volcengine/terraform-provider-volcengine/volcengine/kafka/kafka_zone" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" "github.com/volcengine/terraform-provider-volcengine/volcengine/cloud_identity/cloud_identity_group" "github.com/volcengine/terraform-provider-volcengine/volcengine/cloud_identity/cloud_identity_permission_set" @@ -646,6 +657,17 @@ func Provider() terraform.ResourceProvider { "volcengine_cloud_identity_permission_sets": cloud_identity_permission_set.DataSourceVolcengineCloudIdentityPermissionSets(), "volcengine_cloud_identity_permission_set_assignments": cloud_identity_permission_set_assignment.DataSourceVolcengineCloudIdentityPermissionSetAssignments(), "volcengine_cloud_identity_permission_set_provisionings": cloud_identity_permission_set_provisioning.DataSourceVolcengineCloudIdentityPermissionSetProvisionings(), + + // ================ Kafka ================ + "volcengine_kafka_sasl_users": kafka_sasl_user.DataSourceVolcengineKafkaSaslUsers(), + "volcengine_kafka_topic_partitions": kafka_topic_partition.DataSourceVolcengineKafkaTopicPartitions(), + "volcengine_kafka_groups": kafka_group.DataSourceVolcengineKafkaGroups(), + "volcengine_kafka_topics": kafka_topic.DataSourceVolcengineKafkaTopics(), + "volcengine_kafka_instances": kafka_instance.DataSourceVolcengineKafkaInstances(), + "volcengine_kafka_regions": kafka_region.DataSourceVolcengineRegions(), + "volcengine_kafka_zones": kafka_zone.DataSourceVolcengineZones(), + "volcengine_kafka_consumed_topics": kafka_consumed_topic.DataSourceVolcengineKafkaConsumedTopics(), + "volcengine_kafka_consumed_partitions": kafka_consumed_partition.DataSourceVolcengineKafkaConsumedPartitions(), }, ResourcesMap: map[string]*schema.Resource{ "volcengine_vpc": vpc.ResourceVolcengineVpc(), @@ -925,6 +947,13 @@ func Provider() terraform.ResourceProvider { "volcengine_cloud_identity_permission_set": cloud_identity_permission_set.ResourceVolcengineCloudIdentityPermissionSet(), "volcengine_cloud_identity_permission_set_assignment": cloud_identity_permission_set_assignment.ResourceVolcengineCloudIdentityPermissionSetAssignment(), "volcengine_cloud_identity_permission_set_provisioning": 
cloud_identity_permission_set_provisioning.ResourceVolcengineCloudIdentityPermissionSetProvisioning(), + + // ================ Kafka ================ + "volcengine_kafka_sasl_user": kafka_sasl_user.ResourceVolcengineKafkaSaslUser(), + "volcengine_kafka_group": kafka_group.ResourceVolcengineKafkaGroup(), + "volcengine_kafka_topic": kafka_topic.ResourceVolcengineKafkaTopic(), + "volcengine_kafka_instance": kafka_instance.ResourceVolcengineKafkaInstance(), + "volcengine_kafka_public_address": kafka_public_address.ResourceVolcengineKafkaPublicAddress(), }, ConfigureFunc: ProviderConfigure, } diff --git a/volcengine/tls/index/data_source_volcengine_tls_indexes.go b/volcengine/tls/index/data_source_volcengine_tls_indexes.go index a9faea34..c71ea553 100644 --- a/volcengine/tls/index/data_source_volcengine_tls_indexes.go +++ b/volcengine/tls/index/data_source_volcengine_tls_indexes.go @@ -115,6 +115,11 @@ func DataSourceVolcengineTlsIndexes() *schema.Resource { Computed: true, Description: "Whether the filed is enabled for analysis.", }, + "index_all": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether to create indexes for all fields in JSON fields with text values.", + }, "json_keys": { Type: schema.TypeList, Computed: true, diff --git a/volcengine/tls/index/resource_volcengine_tls_index.go b/volcengine/tls/index/resource_volcengine_tls_index.go index 522dcab6..f8dc75a3 100644 --- a/volcengine/tls/index/resource_volcengine_tls_index.go +++ b/volcengine/tls/index/resource_volcengine_tls_index.go @@ -108,6 +108,12 @@ func ResourceVolcengineTlsIndex() *schema.Resource { Default: false, Description: "Whether the filed is enabled for analysis.", }, + "index_all": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Whether to create indexes for all fields in JSON fields with text values. 
This field is valid when the `value_type` is `json`.", + }, "json_keys": { Type: schema.TypeSet, Optional: true, diff --git a/volcengine/tls/index/service_volcengine_tls_index.go b/volcengine/tls/index/service_volcengine_tls_index.go index 2c989dc9..f8d4f2f9 100644 --- a/volcengine/tls/index/service_volcengine_tls_index.go +++ b/volcengine/tls/index/service_volcengine_tls_index.go @@ -330,6 +330,7 @@ func transKeyValueToRequest(keyValueSet interface{}) ([]interface{}, error) { } keyValue["Key"] = kMap["key"] valueMap["ValueType"] = kMap["value_type"] + sqlFlag := false if v, ok := kMap["case_sensitive"]; ok { valueMap["CaseSensitive"] = v } @@ -339,8 +340,12 @@ func transKeyValueToRequest(keyValueSet interface{}) ([]interface{}, error) { if v, ok := kMap["delimiter"]; ok { valueMap["Delimiter"] = v } + if v, ok := kMap["index_all"]; ok { + valueMap["IndexAll"] = v + } if v, ok := kMap["sql_flag"]; ok { valueMap["SqlFlag"] = v + sqlFlag = v.(bool) } if v, ok := kMap["json_keys"]; ok { jsonKeys := make([]interface{}, 0) @@ -360,6 +365,7 @@ func transKeyValueToRequest(keyValueSet interface{}) ([]interface{}, error) { if v, ok = keyMap["value_type"]; ok { jsonValue := make(map[string]interface{}) jsonValue["ValueType"] = v + jsonValue["SqlFlag"] = sqlFlag jsonKey["Value"] = jsonValue } jsonKeys = append(jsonKeys, jsonKey) diff --git a/website/docs/d/kafka_consumed_partitions.html.markdown b/website/docs/d/kafka_consumed_partitions.html.markdown new file mode 100644 index 00000000..42fd3d1e --- /dev/null +++ b/website/docs/d/kafka_consumed_partitions.html.markdown @@ -0,0 +1,114 @@ +--- +subcategory: "KAFKA" +layout: "volcengine" +page_title: "Volcengine: volcengine_kafka_consumed_partitions" +sidebar_current: "docs-volcengine-datasource-kafka_consumed_partitions" +description: |- + Use this data source to query detailed information of kafka consumed partitions +--- +# volcengine_kafka_consumed_partitions +Use this data source to query detailed information of kafka consumed partitions +## Example Usage +```hcl +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_group" "foo" { + instance_id = volcengine_kafka_instance.foo.id + group_id = "acc-test-group" + description = "tf-test" +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" 
+ description = "tf-test" + all_authority = true + password_type = "Scram" +} + +resource "volcengine_kafka_topic" "foo" { + topic_name = "acc-test-topic" + instance_id = volcengine_kafka_instance.foo.id + description = "tf-test" + partition_number = 15 + replica_number = 3 + + parameters { + min_insync_replica_number = 2 + message_max_byte = 10 + log_retention_hours = 96 + } + + all_authority = false + access_policies { + user_name = volcengine_kafka_sasl_user.foo.user_name + access_policy = "Pub" + } +} + +data "volcengine_kafka_consumed_partitions" "default" { + instance_id = volcengine_kafka_instance.foo.id + group_id = volcengine_kafka_group.foo.group_id + topic_name = volcengine_kafka_topic.foo.topic_name +} +``` +## Argument Reference +The following arguments are supported: +* `group_id` - (Required) The id of kafka group. +* `instance_id` - (Required) The id of kafka instance. +* `topic_name` - (Required) The name of kafka topic. +* `output_file` - (Optional) File name where to save data source results. + +## Attributes Reference +In addition to all arguments above, the following attributes are exported: +* `consumed_partitions` - The collection of query. + * `accumulation` - The total amount of message accumulation in this topic partition for the consumer group. + * `consumed_client` - The consumed client info of partition. + * `consumed_offset` - The consumed offset of partition. + * `end_offset` - The end offset of partition. + * `partition_id` - The index number of partition. + * `start_offset` - The start offset of partition. +* `total_count` - The total count of query. + + diff --git a/website/docs/d/kafka_consumed_topics.html.markdown b/website/docs/d/kafka_consumed_topics.html.markdown new file mode 100644 index 00000000..f8739bf1 --- /dev/null +++ b/website/docs/d/kafka_consumed_topics.html.markdown @@ -0,0 +1,110 @@ +--- +subcategory: "KAFKA" +layout: "volcengine" +page_title: "Volcengine: volcengine_kafka_consumed_topics" +sidebar_current: "docs-volcengine-datasource-kafka_consumed_topics" +description: |- + Use this data source to query detailed information of kafka consumed topics +--- +# volcengine_kafka_consumed_topics +Use this data source to query detailed information of kafka consumed topics +## Example Usage +```hcl +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_group" "foo" { + instance_id = volcengine_kafka_instance.foo.id + group_id = "acc-test-group" + description = "tf-test" +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" 
+ description = "tf-test" + all_authority = true + password_type = "Scram" +} + +resource "volcengine_kafka_topic" "foo" { + topic_name = "acc-test-topic" + instance_id = volcengine_kafka_instance.foo.id + description = "tf-test" + partition_number = 15 + replica_number = 3 + + parameters { + min_insync_replica_number = 2 + message_max_byte = 10 + log_retention_hours = 96 + } + + all_authority = false + access_policies { + user_name = volcengine_kafka_sasl_user.foo.user_name + access_policy = "Pub" + } +} + +data "volcengine_kafka_consumed_topics" "default" { + instance_id = volcengine_kafka_instance.foo.id + group_id = volcengine_kafka_group.foo.group_id + topic_name = volcengine_kafka_topic.foo.topic_name +} +``` +## Argument Reference +The following arguments are supported: +* `group_id` - (Required) The id of kafka group. +* `instance_id` - (Required) The id of kafka instance. +* `output_file` - (Optional) File name where to save data source results. +* `topic_name` - (Optional) The name of kafka topic. This field supports fuzzy query. + +## Attributes Reference +In addition to all arguments above, the following attributes are exported: +* `consumed_topics` - The collection of query. + * `accumulation` - The total amount of message accumulation in this topic for the consumer group. + * `topic_name` - The name of kafka topic. +* `total_count` - The total count of query. + + diff --git a/website/docs/d/kafka_groups.html.markdown b/website/docs/d/kafka_groups.html.markdown new file mode 100644 index 00000000..fcd9dfa5 --- /dev/null +++ b/website/docs/d/kafka_groups.html.markdown @@ -0,0 +1,79 @@ +--- +subcategory: "KAFKA" +layout: "volcengine" +page_title: "Volcengine: volcengine_kafka_groups" +sidebar_current: "docs-volcengine-datasource-kafka_groups" +description: |- + Use this data source to query detailed information of kafka groups +--- +# volcengine_kafka_groups +Use this data source to query detailed information of kafka groups +## Example Usage +```hcl +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_group" "foo" { + instance_id = volcengine_kafka_instance.foo.id + group_id = "acc-test-group" + description = "tf-test" +} + +data "volcengine_kafka_groups" "default" { + instance_id = volcengine_kafka_group.foo.instance_id +} +``` +## Argument Reference +The following arguments are supported: +* `instance_id` - (Required) The instance id of kafka group. +* `group_id` - (Optional) The id of kafka group, support fuzzy matching. +* `name_regex` - (Optional) A Name Regex of kafka group. +* `output_file` - (Optional) File name where to save data source results. 
+ +## Attributes Reference +In addition to all arguments above, the following attributes are exported: +* `groups` - The collection of query. + * `group_id` - The id of kafka group. + * `state` - The state of kafka group. +* `total_count` - The total count of query. + + diff --git a/website/docs/d/kafka_instances.html.markdown b/website/docs/d/kafka_instances.html.markdown new file mode 100644 index 00000000..70cfcb9d --- /dev/null +++ b/website/docs/d/kafka_instances.html.markdown @@ -0,0 +1,120 @@ +--- +subcategory: "KAFKA" +layout: "volcengine" +page_title: "Volcengine: volcengine_kafka_instances" +sidebar_current: "docs-volcengine-datasource-kafka_instances" +description: |- + Use this data source to query detailed information of kafka instances +--- +# volcengine_kafka_instances +Use this data source to query detailed information of kafka instances +## Example Usage +```hcl +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +data "volcengine_kafka_instances" "default" { + instance_id = volcengine_kafka_instance.foo.id +} +``` +## Argument Reference +The following arguments are supported: +* `instance_id` - (Optional) The id of instance. +* `instance_name` - (Optional) The name of instance. +* `instance_status` - (Optional) The status of instance. +* `output_file` - (Optional) File name where to save data source results. +* `tags` - (Optional) The tags of instance. +* `zone_id` - (Optional) The zone id of instance. + +The `tags` object supports the following: + +* `key` - (Required) The key of tag. +* `value` - (Required) The value of tag. + +## Attributes Reference +In addition to all arguments above, the following attributes are exported: +* `instances` - The collection of query. + * `account_id` - The id of account. + * `auto_renew` - The auto renew status of instance. + * `charge_expire_time` - The charge expire time of instance. + * `charge_start_time` - The charge start time of instance. + * `charge_status` - The charge status of instance. + * `charge_type` - The charge type of instance. + * `compute_spec` - The compute spec of instance. + * `connection_info` - Connection info of the instance. + * `endpoint_type` - The endpoint type of instance. + * `internal_endpoint` - The internal endpoint of instance. + * `network_type` - The network type of instance. + * `public_endpoint` - The public endpoint of instance. + * `create_time` - The create time of instance. + * `eip_id` - The id of eip. + * `id` - The id of instance. + * `instance_description` - The description of instance. + * `instance_id` - The id of instance. + * `instance_name` - The name of instance. + * `instance_status` - The status of instance. 
+ * `overdue_reclaim_time` - The overdue reclaim time of instance. + * `overdue_time` - The overdue time of instance. + * `parameters` - Parameters of the instance. + * `parameter_name` - Parameter name. + * `parameter_value` - Parameter value. + * `period_unit` - The period unit of instance. + * `private_domain_on_public` - Whether enable private domain on public. + * `project_name` - The name of project. + * `region_id` - The id of region. + * `storage_space` - The storage space of instance. + * `storage_type` - The storage type of instance. + * `subnet_id` - The id of subnet. + * `tags` - The Tags of instance. + * `key` - The key of tags. + * `value` - The value of tags. + * `usable_partition_number` - The usable partition number of instance. + * `used_group_number` - The used group number of instance. + * `used_partition_number` - The used partition number of instance. + * `used_storage_space` - The used storage space of instance. + * `used_topic_number` - The used topic number of instance. + * `version` - The version of instance. + * `vpc_id` - The id of vpc. + * `zone_id` - The id of zone. +* `total_count` - The total count of query. + + diff --git a/website/docs/d/kafka_regions.html.markdown b/website/docs/d/kafka_regions.html.markdown new file mode 100644 index 00000000..5f0334c2 --- /dev/null +++ b/website/docs/d/kafka_regions.html.markdown @@ -0,0 +1,29 @@ +--- +subcategory: "KAFKA" +layout: "volcengine" +page_title: "Volcengine: volcengine_kafka_regions" +sidebar_current: "docs-volcengine-datasource-kafka_regions" +description: |- + Use this data source to query detailed information of kafka regions +--- +# volcengine_kafka_regions +Use this data source to query detailed information of kafka regions +## Example Usage +```hcl +data "volcengine_kafka_regions" "default" { +} +``` +## Argument Reference +The following arguments are supported: +* `output_file` - (Optional) File name where to save data source results. + +## Attributes Reference +In addition to all arguments above, the following attributes are exported: +* `regions` - The collection of region query. + * `description` - The description of region. + * `region_id` - The id of the region. + * `region_name` - The name of region. + * `status` - The status of region. +* `total_count` - The total count of region query. 
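+
+As a minimal sketch (the `kafka_region_ids` output name is illustrative, not part of the data source), the region ids returned in `regions` can be referenced like any other list attribute:
+
+```hcl
+output "kafka_region_ids" {
+  value = data.volcengine_kafka_regions.default.regions[*].region_id
+}
+```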
+
+
diff --git a/website/docs/d/kafka_sasl_users.html.markdown b/website/docs/d/kafka_sasl_users.html.markdown
new file mode 100644
index 00000000..e69caecd
--- /dev/null
+++ b/website/docs/d/kafka_sasl_users.html.markdown
@@ -0,0 +1,85 @@
+---
+subcategory: "KAFKA"
+layout: "volcengine"
+page_title: "Volcengine: volcengine_kafka_sasl_users"
+sidebar_current: "docs-volcengine-datasource-kafka_sasl_users"
+description: |-
+  Use this data source to query detailed information of kafka sasl users
+---
+# volcengine_kafka_sasl_users
+Use this data source to query detailed information of kafka sasl users
+## Example Usage
+```hcl
+data "volcengine_zones" "foo" {
+}
+
+resource "volcengine_vpc" "foo" {
+  vpc_name   = "acc-test-vpc"
+  cidr_block = "172.16.0.0/16"
+}
+
+resource "volcengine_subnet" "foo" {
+  subnet_name = "acc-test-subnet"
+  cidr_block  = "172.16.0.0/24"
+  zone_id     = data.volcengine_zones.foo.zones[0].id
+  vpc_id      = volcengine_vpc.foo.id
+}
+
+resource "volcengine_kafka_instance" "foo" {
+  instance_name        = "acc-test-kafka"
+  instance_description = "tf-test"
+  version              = "2.2.2"
+  compute_spec         = "kafka.20xrate.hw"
+  subnet_id            = volcengine_subnet.foo.id
+  user_name            = "tf-user"
+  user_password        = "tf-pass!@q1"
+  charge_type          = "PostPaid"
+  storage_space        = 300
+  partition_number     = 350
+  project_name         = "default"
+  tags {
+    key   = "k1"
+    value = "v1"
+  }
+
+  parameters {
+    parameter_name  = "MessageMaxByte"
+    parameter_value = "12"
+  }
+  parameters {
+    parameter_name  = "LogRetentionHours"
+    parameter_value = "70"
+  }
+}
+
+resource "volcengine_kafka_sasl_user" "foo" {
+  user_name     = "acc-test-user"
+  instance_id   = volcengine_kafka_instance.foo.id
+  user_password = "suqsnis123!"
+  description   = "tf-test"
+  all_authority = true
+  password_type = "Scram"
+}
+
+data "volcengine_kafka_sasl_users" "default" {
+  instance_id = volcengine_kafka_instance.foo.id
+  user_name   = volcengine_kafka_sasl_user.foo.user_name
+}
+```
+## Argument Reference
+The following arguments are supported:
+* `instance_id` - (Required) The id of instance.
+* `output_file` - (Optional) File name where to save data source results.
+* `user_name` - (Optional) The user name. This field supports fuzzy matching.
+
+## Attributes Reference
+In addition to all arguments above, the following attributes are exported:
+* `total_count` - The total count of query.
+* `users` - The collection of query.
+    * `all_authority` - Whether this user has read and write permissions for all topics.
+    * `create_time` - The create time.
+    * `description` - The description of user.
+    * `password_type` - The type of password.
+    * `user_name` - The name of user.
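+
+The matched users can then be referenced elsewhere in a configuration. A minimal sketch, assuming the `volcengine_kafka_sasl_users.default` data source declared above (the output name is illustrative):
+
+```hcl
+# List the names of all SASL users returned by the (fuzzy) query.
+output "kafka_sasl_user_names" {
+  value = data.volcengine_kafka_sasl_users.default.users[*].user_name
+}
+```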
+
+
diff --git a/website/docs/d/kafka_topic_partitions.html.markdown b/website/docs/d/kafka_topic_partitions.html.markdown
new file mode 100644
index 00000000..d0662c6a
--- /dev/null
+++ b/website/docs/d/kafka_topic_partitions.html.markdown
@@ -0,0 +1,111 @@
+---
+subcategory: "KAFKA"
+layout: "volcengine"
+page_title: "Volcengine: volcengine_kafka_topic_partitions"
+sidebar_current: "docs-volcengine-datasource-kafka_topic_partitions"
+description: |-
+  Use this data source to query detailed information of kafka topic partitions
+---
+# volcengine_kafka_topic_partitions
+Use this data source to query detailed information of kafka topic partitions
+## Example Usage
+```hcl
+data "volcengine_zones" "foo" {
+}
+
+resource "volcengine_vpc" "foo" {
+  vpc_name   = "acc-test-vpc"
+  cidr_block = "172.16.0.0/16"
+}
+
+resource "volcengine_subnet" "foo" {
+  subnet_name = "acc-test-subnet"
+  cidr_block  = "172.16.0.0/24"
+  zone_id     = data.volcengine_zones.foo.zones[0].id
+  vpc_id      = volcengine_vpc.foo.id
+}
+
+resource "volcengine_kafka_instance" "foo" {
+  instance_name        = "acc-test-kafka"
+  instance_description = "tf-test"
+  version              = "2.2.2"
+  compute_spec         = "kafka.20xrate.hw"
+  subnet_id            = volcengine_subnet.foo.id
+  user_name            = "tf-user"
+  user_password        = "tf-pass!@q1"
+  charge_type          = "PostPaid"
+  storage_space        = 300
+  partition_number     = 350
+  project_name         = "default"
+  tags {
+    key   = "k1"
+    value = "v1"
+  }
+
+  parameters {
+    parameter_name  = "MessageMaxByte"
+    parameter_value = "12"
+  }
+  parameters {
+    parameter_name  = "LogRetentionHours"
+    parameter_value = "70"
+  }
+}
+
+resource "volcengine_kafka_sasl_user" "foo" {
+  user_name     = "acc-test-user"
+  instance_id   = volcengine_kafka_instance.foo.id
+  user_password = "suqsnis123!"
+  description   = "tf-test"
+  all_authority = true
+  password_type = "Scram"
+}
+
+resource "volcengine_kafka_topic" "foo" {
+  topic_name       = "acc-test-topic"
+  instance_id      = volcengine_kafka_instance.foo.id
+  description      = "tf-test"
+  partition_number = 15
+  replica_number   = 3
+
+  parameters {
+    min_insync_replica_number = 2
+    message_max_byte          = 10
+    log_retention_hours       = 96
+  }
+
+  all_authority = false
+  access_policies {
+    user_name     = volcengine_kafka_sasl_user.foo.user_name
+    access_policy = "Pub"
+  }
+}
+
+data "volcengine_kafka_topic_partitions" "default" {
+  instance_id   = volcengine_kafka_instance.foo.id
+  topic_name    = volcengine_kafka_topic.foo.topic_name
+  partition_ids = [1, 2]
+}
+```
+## Argument Reference
+The following arguments are supported:
+* `instance_id` - (Required) The id of kafka instance.
+* `topic_name` - (Required) The name of kafka topic.
+* `output_file` - (Optional) File name where to save data source results.
+* `partition_ids` - (Optional) The index numbers of the partitions to query.
+* `under_insync_only` - (Optional) Whether to only query partitions that have out-of-sync replicas. Default is false.
+
+## Attributes Reference
+In addition to all arguments above, the following attributes are exported:
+* `partitions` - The collection of query.
+    * `end_offset` - The end offset of partition leader.
+    * `insync_replicas` - The insync replica info.
+    * `leader` - The leader info of partition.
+    * `message_count` - The count of message.
+    * `partition_id` - The index number of partition.
+    * `replicas` - The replica info.
+    * `start_offset` - The start offset of partition leader.
+    * `under_insync_replicas` - The under insync replica info.
+* `total_count` - The total count of query.
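+
+The partition details are returned as a list of objects. A minimal sketch of consuming them, assuming the `volcengine_kafka_topic_partitions.default` data source declared above (the output name is illustrative):
+
+```hcl
+# Map each partition id to its current message count.
+output "partition_message_counts" {
+  value = {
+    for p in data.volcengine_kafka_topic_partitions.default.partitions :
+    p.partition_id => p.message_count
+  }
+}
+```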
+ + diff --git a/website/docs/d/kafka_topics.html.markdown b/website/docs/d/kafka_topics.html.markdown new file mode 100644 index 00000000..4e507329 --- /dev/null +++ b/website/docs/d/kafka_topics.html.markdown @@ -0,0 +1,117 @@ +--- +subcategory: "KAFKA" +layout: "volcengine" +page_title: "Volcengine: volcengine_kafka_topics" +sidebar_current: "docs-volcengine-datasource-kafka_topics" +description: |- + Use this data source to query detailed information of kafka topics +--- +# volcengine_kafka_topics +Use this data source to query detailed information of kafka topics +## Example Usage +```hcl +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" + description = "tf-test" + all_authority = true + password_type = "Scram" +} + +resource "volcengine_kafka_topic" "foo" { + topic_name = "acc-test-topic" + instance_id = volcengine_kafka_instance.foo.id + description = "tf-test" + partition_number = 15 + replica_number = 3 + + parameters { + min_insync_replica_number = 2 + message_max_byte = 10 + log_retention_hours = 96 + } + + all_authority = false + access_policies { + user_name = volcengine_kafka_sasl_user.foo.user_name + access_policy = "Pub" + } +} + +data "volcengine_kafka_topics" "default" { + instance_id = volcengine_kafka_topic.foo.instance_id +} +``` +## Argument Reference +The following arguments are supported: +* `instance_id` - (Required) The id of kafka instance. +* `name_regex` - (Optional) A Name Regex of kafka topic. +* `output_file` - (Optional) File name where to save data source results. +* `partition_number` - (Optional) The number of partition in kafka topic. +* `replica_number` - (Optional) The number of replica in kafka topic. +* `topic_name` - (Optional) The name of kafka topic. This field supports fuzzy query. +* `user_name` - (Optional) When a user name is specified, only the access policy of the specified user for this Topic will be returned. + +## Attributes Reference +In addition to all arguments above, the following attributes are exported: +* `topics` - The collection of query. + * `access_policies` - The access policies info of the kafka topic. + * `access_policy` - The access policy of SASL user. + * `user_name` - The name of SASL user. + * `all_authority` - Whether the kafka topic is configured to be accessible by all users. + * `create_time` - The create time of the kafka topic. + * `description` - The description of the kafka topic. + * `parameters` - The parameters of the kafka topic. + * `log_retention_hours` - The retention hours of log. 
+ * `message_max_byte` - The max byte of message. + * `min_insync_replica_number` - The min number of sync replica. + * `partition_number` - The number of partition in the kafka topic. + * `replica_number` - The number of replica in the kafka topic. + * `status` - The status of the kafka topic. + * `topic_name` - The name of the kafka topic. +* `total_count` - The total count of query. + + diff --git a/website/docs/d/kafka_zones.html.markdown b/website/docs/d/kafka_zones.html.markdown new file mode 100644 index 00000000..80354560 --- /dev/null +++ b/website/docs/d/kafka_zones.html.markdown @@ -0,0 +1,32 @@ +--- +subcategory: "KAFKA" +layout: "volcengine" +page_title: "Volcengine: volcengine_kafka_zones" +sidebar_current: "docs-volcengine-datasource-kafka_zones" +description: |- + Use this data source to query detailed information of kafka zones +--- +# volcengine_kafka_zones +Use this data source to query detailed information of kafka zones +## Example Usage +```hcl +data "volcengine_kafka_zones" "default" { + region_id = "cn-beijing" +} +``` +## Argument Reference +The following arguments are supported: +* `region_id` - (Required) The Id of Region. +* `output_file` - (Optional) File name where to save data source results. + +## Attributes Reference +In addition to all arguments above, the following attributes are exported: +* `total_count` - The total count of zone query. +* `zones` - The collection of zone query. + * `description` - The description of the zone. + * `id` - The id of the zone. + * `status` - The status of the zone. + * `zone_id` - The id of the zone. + * `zone_name` - The name of the zone. + + diff --git a/website/docs/d/tls_indexes.html.markdown b/website/docs/d/tls_indexes.html.markdown index c1987550..21d0b067 100644 --- a/website/docs/d/tls_indexes.html.markdown +++ b/website/docs/d/tls_indexes.html.markdown @@ -32,6 +32,7 @@ In addition to all arguments above, the following attributes are exported: * `case_sensitive` - Whether the value is case sensitive. * `delimiter` - The delimiter of the value. * `include_chinese` - Whether the value include chinese. + * `index_all` - Whether to create indexes for all fields in JSON fields with text values. * `json_keys` - The JSON subfield key value index. * `case_sensitive` - Whether the value is case sensitive. * `delimiter` - The delimiter of the value. 
diff --git a/website/docs/r/kafka_group.html.markdown b/website/docs/r/kafka_group.html.markdown
new file mode 100644
index 00000000..1b50a08e
--- /dev/null
+++ b/website/docs/r/kafka_group.html.markdown
@@ -0,0 +1,78 @@
+---
+subcategory: "KAFKA"
+layout: "volcengine"
+page_title: "Volcengine: volcengine_kafka_group"
+sidebar_current: "docs-volcengine-resource-kafka_group"
+description: |-
+  Provides a resource to manage kafka group
+---
+# volcengine_kafka_group
+Provides a resource to manage kafka group
+## Example Usage
+```hcl
+data "volcengine_zones" "foo" {
+}
+
+resource "volcengine_vpc" "foo" {
+  vpc_name   = "acc-test-vpc"
+  cidr_block = "172.16.0.0/16"
+}
+
+resource "volcengine_subnet" "foo" {
+  subnet_name = "acc-test-subnet"
+  cidr_block  = "172.16.0.0/24"
+  zone_id     = data.volcengine_zones.foo.zones[0].id
+  vpc_id      = volcengine_vpc.foo.id
+}
+
+resource "volcengine_kafka_instance" "foo" {
+  instance_name        = "acc-test-kafka"
+  instance_description = "tf-test"
+  version              = "2.2.2"
+  compute_spec         = "kafka.20xrate.hw"
+  subnet_id            = volcengine_subnet.foo.id
+  user_name            = "tf-user"
+  user_password        = "tf-pass!@q1"
+  charge_type          = "PostPaid"
+  storage_space        = 300
+  partition_number     = 350
+  project_name         = "default"
+  tags {
+    key   = "k1"
+    value = "v1"
+  }
+
+  parameters {
+    parameter_name  = "MessageMaxByte"
+    parameter_value = "12"
+  }
+  parameters {
+    parameter_name  = "LogRetentionHours"
+    parameter_value = "70"
+  }
+}
+
+resource "volcengine_kafka_group" "foo" {
+  instance_id = volcengine_kafka_instance.foo.id
+  group_id    = "acc-test-group"
+  description = "tf-test"
+}
+```
+## Argument Reference
+The following arguments are supported:
+* `group_id` - (Required, ForceNew) The id of kafka group.
+* `instance_id` - (Required, ForceNew) The instance id of kafka group.
+* `description` - (Optional) The description of kafka group.
+
+## Attributes Reference
+In addition to all arguments above, the following attributes are exported:
+* `id` - ID of the resource.
+* `state` - The state of kafka group.
+
+
+## Import
+KafkaGroup can be imported using the instance_id:group_id, e.g.
+```
+$ terraform import volcengine_kafka_group.default kafka-****x:groupId
+```
+
diff --git a/website/docs/r/kafka_instance.html.markdown b/website/docs/r/kafka_instance.html.markdown
new file mode 100644
index 00000000..308ff406
--- /dev/null
+++ b/website/docs/r/kafka_instance.html.markdown
@@ -0,0 +1,113 @@
+---
+subcategory: "KAFKA"
+layout: "volcengine"
+page_title: "Volcengine: volcengine_kafka_instance"
+sidebar_current: "docs-volcengine-resource-kafka_instance"
+description: |-
+  Provides a resource to manage kafka instance
+---
+# volcengine_kafka_instance
+Provides a resource to manage kafka instance
+## Notice
+When destroying this resource, if the charge type of the resource is PrePaid, please first unsubscribe the resource
+in the [Volcengine Console](https://console.volcengine.com/finance/unsubscribe/). After the console operation is
+completed, you can use 'terraform state rm ${resourceId}' to remove the resource from the Terraform state.
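+
+For example, assuming the instance is declared as `volcengine_kafka_instance.foo` (as in the example below), the state removal step after unsubscribing in the console would be:
+```
+$ terraform state rm volcengine_kafka_instance.foo
+```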
+## Example Usage
+```hcl
+data "volcengine_zones" "foo" {
+}
+
+resource "volcengine_vpc" "foo" {
+  vpc_name   = "acc-test-vpc"
+  cidr_block = "172.16.0.0/16"
+}
+
+resource "volcengine_subnet" "foo" {
+  subnet_name = "acc-test-subnet"
+  cidr_block  = "172.16.0.0/24"
+  zone_id     = data.volcengine_zones.foo.zones[0].id
+  vpc_id      = volcengine_vpc.foo.id
+}
+
+resource "volcengine_kafka_instance" "foo" {
+  instance_name        = "acc-test-kafka"
+  instance_description = "tf-test"
+  version              = "2.2.2"
+  compute_spec         = "kafka.20xrate.hw"
+  subnet_id            = volcengine_subnet.foo.id
+  user_name            = "tf-user"
+  user_password        = "tf-pass!@q1"
+  charge_type          = "PostPaid"
+  storage_space        = 300
+  partition_number     = 350
+  project_name         = "default"
+  tags {
+    key   = "k1"
+    value = "v1"
+  }
+
+  parameters {
+    parameter_name  = "MessageMaxByte"
+    parameter_value = "12"
+  }
+  parameters {
+    parameter_name  = "LogRetentionHours"
+    parameter_value = "70"
+  }
+  parameters {
+    parameter_name  = "MessageTimestampType"
+    parameter_value = "CreateTime"
+  }
+  parameters {
+    parameter_name  = "OffsetRetentionMinutes"
+    parameter_value = "10080"
+  }
+  parameters {
+    parameter_name  = "AutoDeleteGroup"
+    parameter_value = "false"
+  }
+}
+```
+## Argument Reference
+The following arguments are supported:
+* `charge_type` - (Required) The charge type of instance, the value can be `PrePaid` or `PostPaid`.
+* `compute_spec` - (Required) The compute spec of instance.
+* `subnet_id` - (Required, ForceNew) The subnet id of instance.
+* `user_name` - (Required, ForceNew) The user name of instance. When importing resources, this attribute will not be imported. If this attribute is set, please use `lifecycle` and `ignore_changes` to ignore changes to this field.
+* `user_password` - (Required, ForceNew) The user password of instance. When importing resources, this attribute will not be imported. If this attribute is set, please use `lifecycle` and `ignore_changes` to ignore changes to this field.
+* `version` - (Required, ForceNew) The version of instance, the value can be `2.2.2` or `2.8.2`.
+* `auto_renew` - (Optional) The auto renew flag of instance. Only effective when `charge_type` is `PrePaid`. Default is false.
+* `instance_description` - (Optional) The description of instance.
+* `instance_name` - (Optional) The name of instance.
+* `need_rebalance` - (Optional) Whether to enable rebalance. Only effective when the `compute_spec` field is modified.
+* `parameters` - (Optional) Parameters of the instance.
+* `partition_number` - (Optional) The partition number of instance.
+* `period` - (Optional) The period of instance. Only effective when `charge_type` is `PrePaid`. Unit is Month.
+* `project_name` - (Optional) The project name of instance.
+* `rebalance_time` - (Optional) The rebalance time.
+* `storage_space` - (Optional) The storage space of instance.
+* `storage_type` - (Optional, ForceNew) The storage type of instance. The value can be `ESSD_FlexPL` or `ESSD_PL0`.
+* `tags` - (Optional) The tags of instance.
+
+The `parameters` object supports the following:
+
+* `parameter_name` - (Required) Parameter name.
+* `parameter_value` - (Required) Parameter value.
+
+The `tags` object supports the following:
+
+* `key` - (Required) The key of tags.
+* `value` - (Required) The value of tags.
+
+## Attributes Reference
+In addition to all arguments above, the following attributes are exported:
+* `id` - ID of the resource.
+
+
+
+## Import
+KafkaInstance can be imported using the id, e.g.
+``` +$ terraform import volcengine_kafka_instance.default kafka-insbjwbbwb +``` + diff --git a/website/docs/r/kafka_public_address.html.markdown b/website/docs/r/kafka_public_address.html.markdown new file mode 100644 index 00000000..90da30a8 --- /dev/null +++ b/website/docs/r/kafka_public_address.html.markdown @@ -0,0 +1,87 @@ +--- +subcategory: "KAFKA" +layout: "volcengine" +page_title: "Volcengine: volcengine_kafka_public_address" +sidebar_current: "docs-volcengine-resource-kafka_public_address" +description: |- + Provides a resource to manage kafka public address +--- +# volcengine_kafka_public_address +Provides a resource to manage kafka public address +## Example Usage +```hcl +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_eip_address" "foo" { + billing_type = "PostPaidByBandwidth" + bandwidth = 1 + isp = "BGP" + name = "acc-test-eip" + description = "tf-test" + project_name = "default" +} + +resource "volcengine_kafka_public_address" "foo" { + instance_id = volcengine_kafka_instance.foo.id + eip_id = volcengine_eip_address.foo.id +} +``` +## Argument Reference +The following arguments are supported: +* `eip_id` - (Required, ForceNew) The id of eip. +* `instance_id` - (Required, ForceNew) The id of kafka instance. + +## Attributes Reference +In addition to all arguments above, the following attributes are exported: +* `id` - ID of the resource. +* `endpoint_type` - The endpoint type of instance. +* `network_type` - The network type of instance. +* `public_endpoint` - The public endpoint of instance. + + +## Import +KafkaPublicAddress can be imported using the instance_id:eip_id, e.g. 
+``` +$ terraform import volcengine_kafka_public_address.default instance_id:eip_id +``` + diff --git a/website/docs/r/kafka_sasl_user.html.markdown b/website/docs/r/kafka_sasl_user.html.markdown new file mode 100644 index 00000000..bfea62c7 --- /dev/null +++ b/website/docs/r/kafka_sasl_user.html.markdown @@ -0,0 +1,84 @@ +--- +subcategory: "KAFKA" +layout: "volcengine" +page_title: "Volcengine: volcengine_kafka_sasl_user" +sidebar_current: "docs-volcengine-resource-kafka_sasl_user" +description: |- + Provides a resource to manage kafka sasl user +--- +# volcengine_kafka_sasl_user +Provides a resource to manage kafka sasl user +## Example Usage +```hcl +data "volcengine_zones" "foo" { +} + +resource "volcengine_vpc" "foo" { + vpc_name = "acc-test-vpc" + cidr_block = "172.16.0.0/16" +} + +resource "volcengine_subnet" "foo" { + subnet_name = "acc-test-subnet" + cidr_block = "172.16.0.0/24" + zone_id = data.volcengine_zones.foo.zones[0].id + vpc_id = volcengine_vpc.foo.id +} + +resource "volcengine_kafka_instance" "foo" { + instance_name = "acc-test-kafka" + instance_description = "tf-test" + version = "2.2.2" + compute_spec = "kafka.20xrate.hw" + subnet_id = volcengine_subnet.foo.id + user_name = "tf-user" + user_password = "tf-pass!@q1" + charge_type = "PostPaid" + storage_space = 300 + partition_number = 350 + project_name = "default" + tags { + key = "k1" + value = "v1" + } + + parameters { + parameter_name = "MessageMaxByte" + parameter_value = "12" + } + parameters { + parameter_name = "LogRetentionHours" + parameter_value = "70" + } +} + +resource "volcengine_kafka_sasl_user" "foo" { + user_name = "acc-test-user" + instance_id = volcengine_kafka_instance.foo.id + user_password = "suqsnis123!" + description = "tf-test" + all_authority = true + password_type = "Scram" +} +``` +## Argument Reference +The following arguments are supported: +* `instance_id` - (Required, ForceNew) The id of instance. +* `user_name` - (Required, ForceNew) The name of user. +* `user_password` - (Required, ForceNew) The password of user. +* `all_authority` - (Optional) Whether this user has read and write permissions for all topics. Default is true. +* `description` - (Optional, ForceNew) The description of user. +* `password_type` - (Optional, ForceNew) The type of password. Valid values are `Scram` and `Plain`. Default is `Plain`. + +## Attributes Reference +In addition to all arguments above, the following attributes are exported: +* `id` - ID of the resource. + + + +## Import +KafkaSaslUser can be imported using the kafka_id:username, e.g. 
+```
+$ terraform import volcengine_kafka_sasl_user.default kafka-cnngbnntswg1****:tfuser
+```
+
diff --git a/website/docs/r/kafka_topic.html.markdown b/website/docs/r/kafka_topic.html.markdown
new file mode 100644
index 00000000..1c843ce9
--- /dev/null
+++ b/website/docs/r/kafka_topic.html.markdown
@@ -0,0 +1,117 @@
+---
+subcategory: "KAFKA"
+layout: "volcengine"
+page_title: "Volcengine: volcengine_kafka_topic"
+sidebar_current: "docs-volcengine-resource-kafka_topic"
+description: |-
+  Provides a resource to manage kafka topic
+---
+# volcengine_kafka_topic
+Provides a resource to manage kafka topic
+## Example Usage
+```hcl
+data "volcengine_zones" "foo" {
+}
+
+resource "volcengine_vpc" "foo" {
+  vpc_name   = "acc-test-vpc"
+  cidr_block = "172.16.0.0/16"
+}
+
+resource "volcengine_subnet" "foo" {
+  subnet_name = "acc-test-subnet"
+  cidr_block  = "172.16.0.0/24"
+  zone_id     = data.volcengine_zones.foo.zones[0].id
+  vpc_id      = volcengine_vpc.foo.id
+}
+
+resource "volcengine_kafka_instance" "foo" {
+  instance_name        = "acc-test-kafka"
+  instance_description = "tf-test"
+  version              = "2.2.2"
+  compute_spec         = "kafka.20xrate.hw"
+  subnet_id            = volcengine_subnet.foo.id
+  user_name            = "tf-user"
+  user_password        = "tf-pass!@q1"
+  charge_type          = "PostPaid"
+  storage_space        = 300
+  partition_number     = 350
+  project_name         = "default"
+  tags {
+    key   = "k1"
+    value = "v1"
+  }
+
+  parameters {
+    parameter_name  = "MessageMaxByte"
+    parameter_value = "12"
+  }
+  parameters {
+    parameter_name  = "LogRetentionHours"
+    parameter_value = "70"
+  }
+}
+
+resource "volcengine_kafka_sasl_user" "foo" {
+  user_name     = "acc-test-user"
+  instance_id   = volcengine_kafka_instance.foo.id
+  user_password = "suqsnis123!"
+  description   = "tf-test"
+  all_authority = true
+  password_type = "Scram"
+}
+
+resource "volcengine_kafka_topic" "foo" {
+  topic_name       = "acc-test-topic"
+  instance_id      = volcengine_kafka_instance.foo.id
+  description      = "tf-test"
+  partition_number = 15
+  replica_number   = 3
+
+  parameters {
+    min_insync_replica_number = 2
+    message_max_byte          = 10
+    log_retention_hours       = 96
+  }
+
+  all_authority = false
+  access_policies {
+    user_name     = volcengine_kafka_sasl_user.foo.user_name
+    access_policy = "Pub"
+  }
+}
+```
+## Argument Reference
+The following arguments are supported:
+* `instance_id` - (Required, ForceNew) The instance id of the kafka topic.
+* `partition_number` - (Required) The number of partitions in the kafka topic. The value range is 1-300. This value can only be increased, not decreased.
+* `topic_name` - (Required, ForceNew) The name of the kafka topic.
+* `access_policies` - (Optional) The access policies info of the kafka topic. This field is only valid when `all_authority` is false.
+* `all_authority` - (Optional) Whether the kafka topic is configured to be accessible by all users. Default: true.
+* `description` - (Optional) The description of the kafka topic.
+* `parameters` - (Optional) The parameters of the kafka topic.
+* `replica_number` - (Optional, ForceNew) The number of replicas in the kafka topic. The value can be 2 or 3. Default is 3.
+
+The `access_policies` object supports the following:
+
+* `access_policy` - (Required) The access policy of SASL user. Valid values: `PubSub`, `Pub`, `Sub`.
+* `user_name` - (Required) The name of SASL user.
+
+The `parameters` object supports the following:
+
+* `log_retention_hours` - (Optional) The retention hours of log. Unit: hour. Valid values: 0-2160. Default is 72.
+* `message_max_byte` - (Optional) The max byte of message. Unit: MB. Valid values: 1-12. Default is 10.
+* `min_insync_replica_number` - (Optional) The minimum number of in-sync replicas. The default value is the replica number minus 1.
+
+## Attributes Reference
+In addition to all arguments above, the following attributes are exported:
+* `id` - ID of the resource.
+
+
+
+## Import
+KafkaTopic can be imported using the instance_id:topic_name, e.g.
+```
+$ terraform import volcengine_kafka_topic.default kafka-cnoeeapetf4s****:topic
+```
+
diff --git a/website/docs/r/tls_index.html.markdown b/website/docs/r/tls_index.html.markdown
index 748dfb37..2ceb8f96 100644
--- a/website/docs/r/tls_index.html.markdown
+++ b/website/docs/r/tls_index.html.markdown
@@ -11,7 +11,7 @@ Provides a resource to manage tls index
 ## Example Usage
 ```hcl
 resource "volcengine_tls_index" "foo" {
-  topic_id = "7ce12237-6670-44a7-9d79-2e36961586e6"
+  topic_id = "227a8d0c-b85b-48df-bee1-0927a595****"
 
   #  full_text {
   #    case_sensitive = true
@@ -25,7 +25,8 @@
     case_sensitive  = true
     delimiter       = "!"
     include_chinese = false
-    sql_flag        = false
+    sql_flag        = true
+    index_all       = true
     json_keys {
       key        = "class"
       value_type = "text"
@@ -88,6 +89,7 @@ The `key_value` object supports the following:
 * `case_sensitive` - (Optional) Whether the value is case sensitive.
 * `delimiter` - (Optional) The delimiter of the value.
 * `include_chinese` - (Optional) Whether the value include chinese.
+* `index_all` - (Optional) Whether to create indexes for all fields in JSON fields with text values. This field is valid when the `value_type` is `json`.
 * `json_keys` - (Optional) The JSON subfield key value index.
 * `sql_flag` - (Optional) Whether the field is enabled for analysis.
diff --git a/website/volcengine.erb b/website/volcengine.erb
index 9142d9f2..321294b3 100644
--- a/website/volcengine.erb
+++ b/website/volcengine.erb
@@ -838,6 +838,63 @@
+        <li>
+          <a href="#">KAFKA</a>
+          <!-- sidebar navigation entries for the new KAFKA data sources and resources -->
+        </li>
         <li>
           <a href="#">MONGODB</a>