terraform-plugin-confluentcloud

A Terraform plugin for managing Confluent Cloud Kafka clusters.

Installation

Download and extract the latest release into your Terraform plugin directory (typically ~/.terraform.d/plugins/), or declare the provider in a required_providers block so Terraform can install it from the registry.

terraform {
  required_providers {
    confluentcloud = {
      source = "Mongey/confluentcloud"
    }
  }
}
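
With the provider declared in required_providers, running terraform init downloads and installs it from the Terraform Registry (this assumes Terraform 0.13 or later, which introduced provider source addresses):

terraform init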

Example

Configure the provider directly, or set the environment variables CONFLUENT_CLOUD_USERNAME and CONFLUENT_CLOUD_PASSWORD.
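
If you would rather keep credentials out of the configuration entirely, export them in your shell before running Terraform. The values below are placeholders, not real credentials:

export CONFLUENT_CLOUD_USERNAME="you@example.com"
export CONFLUENT_CLOUD_PASSWORD="your-password"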

terraform {
  required_providers {
    confluentcloud = {
      source = "Mongey/confluentcloud"
    }
    kafka = {
      source  = "Mongey/kafka"
      version = "0.2.11"
    }
  }
}

provider "confluentcloud" {
  username = "[email protected]"
  password = "hunter2"
}

resource "confluentcloud_environment" "environment" {
  name = "production"
}

resource "confluentcloud_kafka_cluster" "test" {
  name             = "provider-test"
  service_provider = "aws"
  region           = "eu-west-1"
  availability     = "LOW"
  environment_id   = confluentcloud_environment.environment.id
  deployment = {
    sku = "BASIC"
  }
  network_egress  = 100
  network_ingress = 100
  storage         = 5000
}

resource "confluentcloud_schema_registry" "test" {
  environment_id   = confluentcloud_environment.environment.id
  service_provider = "aws"
  region           = "EU"

  # The schema registry can only be enabled once the environment contains at least one Kafka cluster.
  depends_on = [confluentcloud_kafka_cluster.test]
}

resource "confluentcloud_api_key" "provider_test" {
  cluster_id     = confluentcloud_kafka_cluster.test.id
  environment_id = confluentcloud_environment.environment.id
}

resource "confluentcloud_service_account" "test" {
  name           = "test"
  description    = "service account test"
}

locals {
  bootstrap_servers = [replace(confluentcloud_kafka_cluster.test.bootstrap_servers, "SASL_SSL://", "")]
}

provider "kafka" {
  bootstrap_servers = local.bootstrap_servers

  tls_enabled    = true
  sasl_username  = confluentcloud_api_key.provider_test.key
  sasl_password  = confluentcloud_api_key.provider_test.secret
  sasl_mechanism = "plain"
  timeout        = 10
}

resource "kafka_topic" "syslog" {
  name               = "syslog"
  replication_factor = 3
  partitions         = 1
  config = {
    "cleanup.policy" = "delete"
  }
}

output "kafka_url" {
  value = local.bootstrap_servers
}

output "key" {
  value     = confluentcloud_api_key.provider_test.key
  sensitive = true
}

output "secret" {
  value     = confluentcloud_api_key.provider_test.secret
  sensitive = true
}
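
Because the key and secret outputs are marked sensitive, Terraform redacts them in the plan/apply summary. Assuming Terraform 0.14 or later, they can still be read on demand:

terraform output kafka_url
terraform output -raw key
terraform output -raw secret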

Importing existing resources

This provider supports importing existing Confluent Cloud resources via terraform import.

Most resource types use the import IDs returned by the ccloud CLI. confluentcloud_kafka_cluster and confluentcloud_schema_registry are imported with an ID of the form <environment ID>/<cluster ID>.
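
For example, importing the Kafka cluster from the configuration above would look roughly like this; env-abc123 and lkc-abc123 are hypothetical IDs, so substitute the values reported by the ccloud CLI:

# IDs below are placeholders; use the environment and cluster IDs from ccloud
terraform import confluentcloud_kafka_cluster.test env-abc123/lkc-abc123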