diff --git a/CHANGELOG.md b/CHANGELOG.md index 15d5355..542357f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,7 @@ All notable changes to this project will be documented in this file. ### Features -* Prevent perpetual differences during the terraform plan/apply ([#9](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/issues/9)) ([0f72a49](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/commit/0f72a49624a464cbbb004b18e02efd56d07b175b)) +* Prevent perpetual differences during the terraform plan/apply ([#14](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/issues/14)) ([0f72a49](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/commit/0f72a49624a464cbbb004b18e02efd56d07b175b)) ## [3.3.0](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/compare/v3.2.0...v3.3.0) (2024-03-05) diff --git a/README.md b/README.md index a3a27dd..7b35284 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,7 @@ Supports all destinations and all Kinesis Firehose Features. * [Opensearch](#opensearch) * [Opensearch Serverless](#opensearch-serverless) * [Splunk](#splunk) + * [Snowflake](#snowflake) * [HTTP Endpoint](#http-endpoint) * [Datadog](#datadog) * [New Relic](#new-relic) @@ -77,11 +78,13 @@ Supports all destinations and all Kinesis Firehose Features. - Data Format Conversion - Dynamic Partition - Redshift - - VPC Support. Security Groups creation supported + - VPC Support. Security Groups creation supported. - ElasticSearch / Opensearch / Opensearch Serverless - - VPC Support. Security Groups creation supported + - VPC Support. Security Groups creation supported. - Splunk - - VPC Support. Security Groups creation supported + - VPC Support. Security Groups creation supported. + - Snowflake + - VPCE Support. 
- Custom Http Endpoint - DataDog - Coralogix @@ -295,6 +298,28 @@ module "firehose" { } ``` +#### Snowflake + +**To Enabled It:** `destination = "snowflake"` + +**Variables Prefix:** `snowflake_` + +```hcl +module "firehose" { + source = "fdmsantos/kinesis-firehose/aws" + version = "x.x.x" + name = "firehose-delivery-stream" + destination = "snowflake" + snowflake_account_identifier = "" + snowflake_private_key = "" + snowflake_key_passphrase = "" + snowflake_user = "" + snowflake_database = "" + snowflake_schema = "" + snowflake_table = "" +} +``` + #### HTTP Endpoint **To Enabled It:** `destination = "http_endpoint"` @@ -789,9 +814,10 @@ The destination variable configured in module is mapped to firehose valid destin | s3 and extended_s3 | extended_s3 | There is no difference between s3 or extended_s3 destinations | | redshift | redshift | | | splunk | splunk | | -| opensearch | elasticsearch | | +| elasticsearch | elasticsearch | | | opensearch | opensearch | | | opensearchserverless | opensearchserverless | | +| snowflake | snowflake | | | http_endpoint | http_endpoint | | | datadog | http_endpoint | The difference regarding http_endpoint is the http_endpoint_url and http_endpoint_name variables aren't support, and it's necessary configure datadog_endpoint_type variable | | newrelic | http_endpoint | The difference regarding http_endpoint is the http_endpoint_url and http_endpoint_name variables aren't support, and it's necessary configure newrelic_endpoint_type variable | @@ -817,6 +843,7 @@ The destination variable configured in module is mapped to firehose valid destin - [Opensearch Serverless In Vpc](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/opensearch/direct-put-to-opensearchserverless-in-vpc) - Creates a Kinesis Firehose Stream with serverless opensearch in VPC as destination. 
- [Public Splunk](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/splunk/public-splunk) - Creates a Kinesis Firehose Stream with public splunk as destination. - [Splunk In VPC](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/splunk/splunk-in-vpc) - Creates a Kinesis Firehose Stream with splunk in VPC as destination. +- [Snowflake](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/snowflake/direct-put-to-snowflake) - Creates a Kinesis Firehose Stream with snowflake as destination. - [Custom Http Endpoint](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/http-endpoint/custom-http-endpoint) - Creates a Kinesis Firehose Stream with custom http endpoint as destination. - [Datadog](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/http-endpoint/datadog) - Creates a Kinesis Firehose Stream with datadog europe metrics as destination. - [New Relic](https://github.com/fdmsantos/terraform-aws-kinesis-firehose/tree/main/examples/http-endpoint/newrelic) - Creates a Kinesis Firehose Stream with New Relic europe metrics as destination. @@ -833,13 +860,13 @@ The destination variable configured in module is mapped to firehose valid destin | Name | Version | |------|---------| | [terraform](#requirement\_terraform) | >= 0.13.1 | -| [aws](#requirement\_aws) | >= 5.33 | +| [aws](#requirement\_aws) | >= 5.47 | ## Providers | Name | Version | |------|---------| -| [aws](#provider\_aws) | >= 5.33 | +| [aws](#provider\_aws) | >= 5.47 | ## Modules @@ -927,9 +954,9 @@ No modules. | [buffering\_size](#input\_buffering\_size) | Buffer incoming data to the specified size, in MBs, before delivering it to the destination. 
| `number` | `5` | no | | [configure\_existing\_application\_role](#input\_configure\_existing\_application\_role) | Set it to True if want use existing application role to add the firehose Policy | `bool` | `false` | no | | [coralogix\_endpoint\_location](#input\_coralogix\_endpoint\_location) | Endpoint Location to coralogix destination | `string` | `"ireland"` | no | -| [coralogix\_parameter\_application\_name](#input\_coralogix\_parameter\_application\_name) | By default, your delivery stream arn will be used as applicationName | `string` | `null` | no | -| [coralogix\_parameter\_subsystem\_name](#input\_coralogix\_parameter\_subsystem\_name) | By default, your delivery stream name will be used as subsystemName | `string` | `null` | no | -| [coralogix\_parameter\_use\_dynamic\_values](#input\_coralogix\_parameter\_use\_dynamic\_values) | To use dynamic values for applicationName and subsystemName | `bool` | `false` | no | +| [coralogix\_parameter\_application\_name](#input\_coralogix\_parameter\_application\_name) | By default, your delivery stream arn will be used as applicationName. | `string` | `null` | no | +| [coralogix\_parameter\_subsystem\_name](#input\_coralogix\_parameter\_subsystem\_name) | By default, your delivery stream name will be used as subsystemName. | `string` | `null` | no | +| [coralogix\_parameter\_use\_dynamic\_values](#input\_coralogix\_parameter\_use\_dynamic\_values) | To use dynamic values for applicationName and subsystemName. | `bool` | `false` | no | | [create](#input\_create) | Controls if kinesis firehose should be created (it affects almost all resources) | `bool` | `true` | no | | [create\_application\_role](#input\_create\_application\_role) | Set it to true to create role to be used by the source | `bool` | `false` | no | | [create\_application\_role\_policy](#input\_create\_application\_role\_policy) | Set it to true to create policy to the role used by the source | `bool` | `false` | no | @@ -1065,6 +1092,20 @@ No modules. 
| [s3\_kms\_key\_arn](#input\_s3\_kms\_key\_arn) | Specifies the KMS key ARN the stream will use to encrypt data. If not set, no encryption will be used | `string` | `null` | no | | [s3\_own\_bucket](#input\_s3\_own\_bucket) | Indicates if you own the bucket. If not, will be configure permissions to grants the bucket owner full access to the objects delivered by Kinesis Data Firehose | `bool` | `true` | no | | [s3\_prefix](#input\_s3\_prefix) | The YYYY/MM/DD/HH time format prefix is automatically used for delivered S3 files. You can specify an extra prefix to be added in front of the time format prefix. Note that if the prefix ends with a slash, it appears as a folder in the S3 bucket | `string` | `null` | no | +| [snowflake\_account\_identifier](#input\_snowflake\_account\_identifier) | The Snowflake account identifier. | `string` | `null` | no | +| [snowflake\_content\_column\_name](#input\_snowflake\_content\_column\_name) | The name of the content column. | `string` | `null` | no | +| [snowflake\_data\_loading\_option](#input\_snowflake\_data\_loading\_option) | The data loading option. | `string` | `null` | no | +| [snowflake\_database](#input\_snowflake\_database) | The Snowflake database name. | `string` | `null` | no | +| [snowflake\_key\_passphrase](#input\_snowflake\_key\_passphrase) | The Snowflake passphrase for the private key. | `string` | `null` | no | +| [snowflake\_metadata\_column\_name](#input\_snowflake\_metadata\_column\_name) | The name of the metadata column. | `string` | `null` | no | +| [snowflake\_private\_key](#input\_snowflake\_private\_key) | The Snowflake private key for authentication. | `string` | `null` | no | +| [snowflake\_private\_link\_vpce\_id](#input\_snowflake\_private\_link\_vpce\_id) | The VPCE ID for Firehose to privately connect with Snowflake. 
| `string` | `null` | no | +| [snowflake\_retry\_duration](#input\_snowflake\_retry\_duration) | The length of time during which Firehose retries delivery after a failure, starting from the initial request and including the first attempt. | `string` | `60` | no | +| [snowflake\_role\_configuration\_enabled](#input\_snowflake\_role\_configuration\_enabled) | Whether the Snowflake role is enabled. | `bool` | `false` | no | +| [snowflake\_role\_configuration\_role](#input\_snowflake\_role\_configuration\_role) | The Snowflake role. | `string` | `null` | no | +| [snowflake\_schema](#input\_snowflake\_schema) | The Snowflake schema name. | `string` | `null` | no | +| [snowflake\_table](#input\_snowflake\_table) | The Snowflake table name. | `string` | `null` | no | +| [snowflake\_user](#input\_snowflake\_user) | The user for authentication. | `string` | `null` | no | | [source\_role\_arn](#input\_source\_role\_arn) | The ARN of the role that provides access to the source. Only Supported on Kinesis and MSK Sources | `string` | `null` | no | | [source\_use\_existing\_role](#input\_source\_use\_existing\_role) | Indicates if want use the kinesis firehose role for sources access. Only Supported on Kinesis and MSK Sources | `bool` | `true` | no | | [splunk\_hec\_acknowledgment\_timeout](#input\_splunk\_hec\_acknowledgment\_timeout) | The amount of time, that Kinesis Firehose waits to receive an acknowledgment from Splunk after it sends it data | `number` | `600` | no | diff --git a/examples/snowflake/direct-put-to-snowflake/README.md b/examples/snowflake/direct-put-to-snowflake/README.md new file mode 100644 index 0000000..1686c5a --- /dev/null +++ b/examples/snowflake/direct-put-to-snowflake/README.md @@ -0,0 +1,60 @@ +# Snowflake + +Configuration in this directory creates kinesis firehose stream with Direct Put as source and Snowflake as destination. + +This example can be tested with Demo Data in Kinesis Firehose Console. 
+ +## Usage + +To run this example you need to execute: + +```bash +$ terraform init +$ terraform plan +$ terraform apply +``` + +Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources. + + +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | >= 0.13.1 | +| [aws](#requirement\_aws) | ~> 5.0 | +| [random](#requirement\_random) | >= 2.0 | + +## Providers + +| Name | Version | +|------|---------| +| [aws](#provider\_aws) | ~> 5.0 | +| [random](#provider\_random) | >= 2.0 | + +## Modules + +| Name | Source | Version | +|------|--------|---------| +| [firehose](#module\_firehose) | ../../../ | n/a | + +## Resources + +| Name | Type | +|------|------| +| [aws_kms_key.this](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | +| [aws_s3_bucket.s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | +| [random_pet.this](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/pet) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [name\_prefix](#input\_name\_prefix) | Name prefix to use in resources | `string` | `"firehose-to-snowflake"` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [firehose\_role](#output\_firehose\_role) | Firehose Role | + diff --git a/examples/snowflake/direct-put-to-snowflake/main.tf b/examples/snowflake/direct-put-to-snowflake/main.tf new file mode 100644 index 0000000..5ca3959 --- /dev/null +++ b/examples/snowflake/direct-put-to-snowflake/main.tf @@ -0,0 +1,38 @@ +resource "random_pet" "this" { + length = 2 +} + +resource "aws_s3_bucket" "s3" { + bucket = "${var.name_prefix}-dest-bucket-${random_pet.this.id}" + force_destroy = true +} + +resource "aws_kms_key" "this" { + description = 
"${var.name_prefix}-kms-key" + deletion_window_in_days = 7 +} + +module "firehose" { + source = "../../../" + name = "${var.name_prefix}-delivery-stream" + destination = "snowflake" + snowflake_account_identifier = "demo" + snowflake_private_key = "MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDIQEU9NvCE4EyK0QZtFBLYWX6KAnNmel4zHsNJ4WEjNzY/YEASrJ9YtHjxItVig4kQqumn8FWkbPoKLUYUqIq9UBIvtjzlsTgMJ7GznShsm0M/n1Bszqmxwm1AwFAPfH21h2MNIzXkHitg/2BN3bkTGctmySFOwNRRervo5HIUtr4qqYZwVYDQtT8+NVL1Tchvgkv4kOuQmDXpmc7iRPx0WQZU1dyPzJ9Vg5sN2nJPZwfRTL0dJfoOVOjJQTZSAEvNw3d05ez0aKBMWYM97ZFc6IJzaSEx19RYjPnluYWlpkUp309cIUlGQHGmVSxPpaoOrI5cfHTdudCzYmQiRxebAgMBAAECggEAA+/5zIx/8Pav2plXqyu50SI1WSHlwm4iFM/LbRsu30WrJQFPwvx10kyFPrOoXBxbNoYvkPQqagmiShYozhn1nGenehyTfEqztV15xi0rnyTXgNRcC2pRhGrCbGcCvcM2DAHewlRQoTsh9uM7ByQIbp798QYqnTbbTsPw+kLt34jpz3eJjFMqB+uVtLuFDA4PZi1Nq/EJhWyuwi3taW2dKn3gx0DD69yxq5lS8USV/XQ0BrF4bbcmQoEJuKnt16hMGl9PMDkqX9DPnrxBR6a9BDMaOw/r4kyOXQBgPIRr59UfN13E1Lj35rXnK6C0TcA98pichFFYjiUvR5ss+Ob7KQKBgQD9ZHqGlI+s826ov4XTbnhcxUGBFX1NHoU9zE28w6bs2++0Bim+jMgwIdJmG7ziOsV9PpvP5Zq3N2tPnkSEA+q8N+BtfBOf60kjpe4eoaYOZiGFpqGPmAW9p+b+gWsOxyUQ2HA9FdUCwEnnWx1gIdJ5BFo4YIEdJWx3Uybw1fxtswKBgQDKT8yizQ0y0dzaCyxNeIIzYpg8Cx77tvV0EFBhDQIt/fEZOIBruBZUaZYaZReEv7VHd6bIGKASDFOx7XtdhlVbfa2p5o/7rPYlAhgsCwfW94ESYJw0X3KTlS9ulSseF+bmPBHIIXPfjARcJIDi4TKv60vbW1Wdxcv08uvFTvjKeQKBgDDeEt8ngXnqTJoQrZ9z+5Rwmkxpt4uK6klbwFY6KVQeqmC+m4hbIDRgIXJ9wPSkPvgDfgsfDbJt5q0pKa+IDdoUsJyMxEAgIS/VzVFs/Vhji+15kEjgGaNU4TCOBvaHo3dXNnYhYr4wFVCf+s9SVoPuOfQLcHsNf5iXmbfynMcPAoGBAKeZPBmSbWCwYplvsB/tuU8AWsVDIUO96dFgwnXj5O5c9SLDn/+c3ULIxcTQAo/CkVbHVK9nVxQciilYZ16vLn9AumGJ07XXL4KxHX0/FhuLpq2mw0DP4YdJi6W8hZ/EhVAuazy0Gd4TjHkY9Hz/upHqB0mNfHvbpH8jzxYBujFhAoGBAMn0LHHuaajivswiK9QpM95qv2tk1wC7spZQXh2Ky4TYcTo3S83datye7Uk85NKYt4790anaGjegA6cTbuky8FgnGm1+iqVhyGxfUMPwREgWOZ3km0DeQGHxApYHiVx2xD6oZzTVpgxM7S6pCX2YxxWQolq7mIfOg5h6U6b5GmiT" + snowflake_user = "user" + snowflake_database = "database" + snowflake_schema = "schema" + snowflake_table = "table" + 
snowflake_data_loading_option = "VARIANT_CONTENT_AND_METADATA_MAPPING" + snowflake_metadata_column_name = "test" + snowflake_content_column_name = "test" + snowflake_role_configuration_enabled = true + snowflake_role_configuration_role = "snowflake_role" + s3_backup_mode = "FailedOnly" + s3_backup_prefix = "backup/" + s3_backup_bucket_arn = aws_s3_bucket.s3.arn + s3_backup_buffering_interval = 100 + s3_backup_buffering_size = 100 + s3_backup_compression = "GZIP" + s3_backup_enable_encryption = true + s3_backup_kms_key_arn = aws_kms_key.this.arn +} diff --git a/examples/snowflake/direct-put-to-snowflake/outputs.tf b/examples/snowflake/direct-put-to-snowflake/outputs.tf new file mode 100644 index 0000000..d149d32 --- /dev/null +++ b/examples/snowflake/direct-put-to-snowflake/outputs.tf @@ -0,0 +1,4 @@ +output "firehose_role" { + description = "Firehose Role" + value = module.firehose.kinesis_firehose_role_arn +} diff --git a/examples/snowflake/direct-put-to-snowflake/variables.tf b/examples/snowflake/direct-put-to-snowflake/variables.tf new file mode 100644 index 0000000..dcc8ba5 --- /dev/null +++ b/examples/snowflake/direct-put-to-snowflake/variables.tf @@ -0,0 +1,5 @@ +variable "name_prefix" { + description = "Name prefix to use in resources" + type = string + default = "firehose-to-snowflake" +} diff --git a/examples/snowflake/direct-put-to-snowflake/versions.tf b/examples/snowflake/direct-put-to-snowflake/versions.tf new file mode 100644 index 0000000..bee969b --- /dev/null +++ b/examples/snowflake/direct-put-to-snowflake/versions.tf @@ -0,0 +1,14 @@ +terraform { + required_version = ">= 0.13.1" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 5.0" + } + random = { + source = "hashicorp/random" + version = ">= 2.0" + } + } +} diff --git a/locals.tf b/locals.tf index 3bca99f..a3a20b3 100644 --- a/locals.tf +++ b/locals.tf @@ -22,7 +22,8 @@ locals { honeycomb : "http_endpoint", logicmonitor : "http_endpoint", mongodb : "http_endpoint", 
- sumologic : "http_endpoint" + sumologic : "http_endpoint", + snowflake : "snowflake" } destination = local.destinations[var.destination] s3_destination = local.destination == "extended_s3" ? true : false @@ -143,7 +144,7 @@ locals { ) : null) # S3 Backup - use_backup_vars_in_s3_configuration = contains(["elasticsearch", "opensearch", "opensearchserverless", "splunk", "http_endpoint"], local.destination) ? true : false + use_backup_vars_in_s3_configuration = contains(["elasticsearch", "opensearch", "opensearchserverless", "splunk", "http_endpoint", "snowflake"], local.destination) ? true : false s3_backup = var.enable_s3_backup ? "Enabled" : "Disabled" enable_s3_backup = var.enable_s3_backup || local.use_backup_vars_in_s3_configuration s3_backup_role_arn = (local.enable_s3_backup ? ( @@ -172,6 +173,10 @@ locals { FailedOnly : "FailedDataOnly", All : "AllData" } + snowflake : { + FailedOnly : "FailedDataOnly", + All : "AllData" + } } s3_backup_mode = local.use_backup_vars_in_s3_configuration ? local.backup_modes[local.destination][var.s3_backup_mode] : null diff --git a/main.tf b/main.tf index 9ceb25a..25cec32 100644 --- a/main.tf +++ b/main.tf @@ -573,6 +573,77 @@ resource "aws_kinesis_firehose_delivery_stream" "this" { } } + dynamic "snowflake_configuration" { + for_each = local.destination == "snowflake" ? 
[1] : [] + content { + account_url = "https://${var.snowflake_account_identifier}.snowflakecomputing.com" + database = var.snowflake_database + private_key = var.snowflake_private_key + key_passphrase = var.snowflake_key_passphrase + role_arn = local.firehose_role_arn + schema = var.snowflake_schema + table = var.snowflake_table + user = var.snowflake_user + data_loading_option = var.snowflake_data_loading_option + metadata_column_name = var.snowflake_metadata_column_name + content_column_name = var.snowflake_content_column_name + s3_backup_mode = local.s3_backup_mode + retry_duration = var.snowflake_retry_duration + + snowflake_role_configuration { + enabled = var.snowflake_role_configuration_enabled + snowflake_role = var.snowflake_role_configuration_role + } + + dynamic "snowflake_vpc_configuration" { + for_each = var.snowflake_private_link_vpce_id != null ? [1] : [] + content { + private_link_vpce_id = var.snowflake_private_link_vpce_id + } + } + + dynamic "cloudwatch_logging_options" { + for_each = var.enable_destination_log ? [1] : [] + content { + enabled = var.enable_destination_log + log_group_name = local.destination_cw_log_group_name + log_stream_name = local.destination_cw_log_stream_name + } + } + + dynamic "processing_configuration" { + for_each = local.enable_processing ? [1] : [] + content { + enabled = local.enable_processing + dynamic "processors" { + for_each = local.processors + content { + type = processors.value["type"] + dynamic "parameters" { + for_each = processors.value["parameters"] + content { + parameter_name = parameters.value["name"] + parameter_value = parameters.value["value"] + } + } + } + } + } + } + + s3_configuration { + role_arn = !local.use_backup_vars_in_s3_configuration ? local.firehose_role_arn : local.s3_backup_role_arn + bucket_arn = !local.use_backup_vars_in_s3_configuration ? var.s3_bucket_arn : var.s3_backup_bucket_arn + buffering_size = !local.use_backup_vars_in_s3_configuration ? 
var.s3_configuration_buffering_size : var.s3_backup_buffering_size + buffering_interval = !local.use_backup_vars_in_s3_configuration ? var.s3_configuration_buffering_interval : var.s3_backup_buffering_interval + compression_format = !local.use_backup_vars_in_s3_configuration ? var.s3_compression_format : var.s3_backup_compression + prefix = !local.use_backup_vars_in_s3_configuration ? var.s3_prefix : var.s3_backup_prefix + error_output_prefix = !local.use_backup_vars_in_s3_configuration ? var.s3_error_output_prefix : var.s3_backup_error_output_prefix + kms_key_arn = (!local.use_backup_vars_in_s3_configuration && var.enable_s3_encryption ? var.s3_kms_key_arn : (local.use_backup_vars_in_s3_configuration && var.s3_backup_enable_encryption ? var.s3_backup_kms_key_arn : null)) + } + } + } + tags = var.tags } diff --git a/variables.tf b/variables.tf index db3474e..b91ab61 100644 --- a/variables.tf +++ b/variables.tf @@ -24,7 +24,7 @@ variable "destination" { type = string validation { error_message = "Please use a valid destination!" - condition = contains(["s3", "extended_s3", "redshift", "opensearch", "opensearchserverless", "elasticsearch", "splunk", "http_endpoint", "datadog", "coralogix", "newrelic", "dynatrace", "honeycomb", "logicmonitor", "mongodb", "sumologic"], var.destination) + condition = contains(["s3", "extended_s3", "redshift", "opensearch", "opensearchserverless", "elasticsearch", "splunk", "http_endpoint", "datadog", "coralogix", "newrelic", "dynatrace", "honeycomb", "logicmonitor", "mongodb", "sumologic", "snowflake"], var.destination) } } @@ -1196,23 +1196,120 @@ variable "coralogix_endpoint_location" { } variable "coralogix_parameter_application_name" { - description = "By default, your delivery stream arn will be used as applicationName" + description = "By default, your delivery stream arn will be used as applicationName." 
type = string + default = null +} + +variable "coralogix_parameter_subsystem_name" { - description = "By default, your delivery stream name will be used as subsystemName" + description = "By default, your delivery stream name will be used as subsystemName." type = string default = null } variable "coralogix_parameter_use_dynamic_values" { - description = "To use dynamic values for applicationName and subsystemName" + description = "To use dynamic values for applicationName and subsystemName." type = bool default = false } +###### +# Snowflake Destination Variables +###### +variable "snowflake_account_identifier" { + description = "The Snowflake account identifier." + type = string + default = null +} + +variable "snowflake_private_key" { + description = "The Snowflake private key for authentication." + type = string + default = null + sensitive = true +} + +variable "snowflake_key_passphrase" { + description = "The Snowflake passphrase for the private key." + type = string + default = null + sensitive = true +} + +variable "snowflake_user" { + description = "The user for authentication." + type = string + default = null +} + +variable "snowflake_database" { + description = "The Snowflake database name." + type = string + default = null +} + +variable "snowflake_schema" { + description = "The Snowflake schema name." + type = string + default = null +} + +variable "snowflake_table" { + description = "The Snowflake table name." + type = string + default = null +} + +variable "snowflake_data_loading_option" { + description = "The data loading option." + type = string + default = null + validation { + error_message = "Please use a valid data loading option!" + condition = var.snowflake_data_loading_option == null || contains(["JSON_MAPPING", "VARIANT_CONTENT_MAPPING", "VARIANT_CONTENT_AND_METADATA_MAPPING"], coalesce(var.snowflake_data_loading_option, "dummy")) + } +} + +variable "snowflake_metadata_column_name" { + description = "The name of the metadata column." 
+ type = string + default = null +} + +variable "snowflake_content_column_name" { + description = "The name of the content column." + type = string + default = null +} + +variable "snowflake_private_link_vpce_id" { + description = "The VPCE ID for Firehose to privately connect with Snowflake." + type = string + default = null +} + +variable "snowflake_role_configuration_enabled" { + description = "Whether the Snowflake role is enabled." + type = bool + default = false +} + +variable "snowflake_role_configuration_role" { + description = "The Snowflake role." + type = string + default = null +} + +variable "snowflake_retry_duration" { + description = "The length of time during which Firehose retries delivery after a failure, starting from the initial request and including the first attempt." + type = string + default = 60 + validation { + error_message = "Minimum: 0 second, maximum: 7200 seconds." + condition = var.snowflake_retry_duration >= 0 && var.snowflake_retry_duration <= 7200 + } +} + ###### # IAM ###### diff --git a/versions.tf b/versions.tf index 60d735e..fa1e158 100644 --- a/versions.tf +++ b/versions.tf @@ -3,7 +3,7 @@ terraform { required_providers { aws = { source = "hashicorp/aws" - version = ">= 5.33" + version = ">= 5.47" } } }