, Error> {
+ // Extract some useful information from the request
+ let who = event
+ .query_string_parameters_ref()
+ .and_then(|params| params.first("name"))
+ .unwrap_or("world");
+ let message = format!("Hello {who}, this is an AWS Lambda HTTP request. serverless.tf was here!");
+
+ // Return something that implements IntoResponse.
+ // It will be serialized to the right response event automatically by the runtime
+ let resp = Response::builder()
+ .status(200)
+ .header("content-type", "text/html")
+ .body(message.into())
+ .map_err(Box::new)?;
+ Ok(resp)
+}
+
+#[tokio::main]
+async fn main() -> Result<(), Error> {
+ tracing::init_default_subscriber();
+
+ run(service_fn(function_handler)).await
+}
diff --git a/examples/multiple-regions/README.md b/examples/multiple-regions/README.md
index af982fc8..7c09e0b0 100644
--- a/examples/multiple-regions/README.md
+++ b/examples/multiple-regions/README.md
@@ -15,21 +15,21 @@ $ terraform apply
Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources.
-
+
## Requirements
| Name | Version |
|------|---------|
| [terraform](#requirement\_terraform) | >= 1.0 |
-| [aws](#requirement\_aws) | >= 5.32 |
+| [aws](#requirement\_aws) | >= 5.79 |
| [random](#requirement\_random) | >= 2.0 |
## Providers
| Name | Version |
|------|---------|
-| [aws](#provider\_aws) | >= 5.32 |
-| [aws.us-east-1](#provider\_aws.us-east-1) | >= 5.32 |
+| [aws](#provider\_aws) | >= 5.79 |
+| [aws.us-east-1](#provider\_aws.us-east-1) | >= 5.79 |
| [random](#provider\_random) | >= 2.0 |
## Modules
@@ -75,4 +75,4 @@ No inputs.
| [lambda\_role\_name](#output\_lambda\_role\_name) | The name of the IAM role created for the Lambda Function |
| [local\_filename](#output\_local\_filename) | The filename of zip archive deployed (if deployment was from local) |
| [s3\_object](#output\_s3\_object) | The map with S3 object data of zip archive deployed (if deployment was from S3) |
-
+
diff --git a/examples/multiple-regions/versions.tf b/examples/multiple-regions/versions.tf
index 55278d04..5cf868aa 100644
--- a/examples/multiple-regions/versions.tf
+++ b/examples/multiple-regions/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 5.32"
+ version = ">= 5.79"
}
random = {
source = "hashicorp/random"
diff --git a/examples/runtimes/README.md b/examples/runtimes/README.md
new file mode 100644
index 00000000..9e67eeb7
--- /dev/null
+++ b/examples/runtimes/README.md
@@ -0,0 +1,68 @@
+# Runtimes Examples
+
+Configuration in this directory creates deployment packages for [various runtimes and programming languages (Rust, Go, Java)](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html).
+
+Each runtime can be exercised by invoking the created Lambda Functions at the end.
+
+Look into [Build Package Examples](https://github.com/terraform-aws-modules/terraform-aws-lambda/tree/master/examples/build-package) for more ways to build package (regardless of the runtime).
+
+## Usage
+
+To run this example you need to execute:
+
+```bash
+$ terraform init
+$ terraform plan
+$ terraform apply
+```
+
+Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources.
+
+
+## Requirements
+
+| Name | Version |
+|------|---------|
+| [terraform](#requirement\_terraform) | >= 1.2 |
+| [aws](#requirement\_aws) | >= 5.79 |
+| [http](#requirement\_http) | >= 3.0 |
+| [random](#requirement\_random) | >= 3.0 |
+
+## Providers
+
+| Name | Version |
+|------|---------|
+| [aws](#provider\_aws) | >= 5.79 |
+| [http](#provider\_http) | >= 3.0 |
+| [random](#provider\_random) | >= 3.0 |
+
+## Modules
+
+| Name | Source | Version |
+|------|--------|---------|
+| [go\_lambda\_function](#module\_go\_lambda\_function) | ../../ | n/a |
+| [java21\_lambda\_function](#module\_java21\_lambda\_function) | ../../ | n/a |
+| [rust\_lambda\_function](#module\_rust\_lambda\_function) | ../../ | n/a |
+
+## Resources
+
+| Name | Type |
+|------|------|
+| [random_pet.this](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/pet) | resource |
+| [aws_lambda_invocation.this](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/lambda_invocation) | data source |
+| [http_http.this](https://registry.terraform.io/providers/hashicorp/http/latest/docs/data-sources/http) | data source |
+
+## Inputs
+
+No inputs.
+
+## Outputs
+
+| Name | Description |
+|------|-------------|
+| [go\_lambda\_function\_url](#output\_go\_lambda\_function\_url) | The URL of the Lambda Function in Go |
+| [java21\_lambda\_function\_arn](#output\_java21\_lambda\_function\_arn) | The ARN of the Lambda Function in Java 21 |
+| [lambda\_function\_result](#output\_lambda\_function\_result) | The results of the Lambda Function calls |
+| [lambda\_function\_status\_codes](#output\_lambda\_function\_status\_codes) | The status codes of the Lambda Function calls |
+| [rust\_lambda\_function\_url](#output\_rust\_lambda\_function\_url) | The URL of the Lambda Function in Rust |
+
diff --git a/examples/runtimes/checks.tf b/examples/runtimes/checks.tf
new file mode 100644
index 00000000..cbf0f2b8
--- /dev/null
+++ b/examples/runtimes/checks.tf
@@ -0,0 +1,37 @@
+locals {
+ successful_response_keyword = "serverless.tf"
+}
+
+data "http" "this" {
+ for_each = {
+ rust = module.rust_lambda_function.lambda_function_url,
+ go = module.go_lambda_function.lambda_function_url,
+ }
+
+ url = each.value
+
+ lifecycle {
+ postcondition {
+ condition = length(regexall(local.successful_response_keyword, self.response_body)) > 0
+ error_message = "${each.key}: ${local.successful_response_keyword} should be in the response."
+ }
+ }
+}
+
+# It is unclear how to make the Java21 example work with a Lambda Function URL, so using Lambda Function invocation instead
+data "aws_lambda_invocation" "this" {
+ for_each = {
+ java21 = module.java21_lambda_function.lambda_function_name,
+ }
+
+ function_name = each.value
+
+ input = jsonencode({})
+
+ lifecycle {
+ postcondition {
+ condition = length(regexall(local.successful_response_keyword, jsondecode(self.result))) > 0
+ error_message = "${each.key}: ${local.successful_response_keyword} should be in the response."
+ }
+ }
+}
diff --git a/examples/runtimes/main.tf b/examples/runtimes/main.tf
new file mode 100644
index 00000000..b9bd61a9
--- /dev/null
+++ b/examples/runtimes/main.tf
@@ -0,0 +1,98 @@
+provider "aws" {
+ region = "eu-west-1"
+}
+
+module "rust_lambda_function" {
+ source = "../../"
+
+ function_name = "${random_pet.this.id}-rust"
+
+ attach_cloudwatch_logs_policy = false
+ cloudwatch_logs_retention_in_days = 1
+
+ create_lambda_function_url = true
+
+ handler = "bootstrap"
+ runtime = "provided.al2023"
+ architectures = ["arm64"] # x86_64 (empty); arm64 (cargo lambda build --arm64)
+
+ trigger_on_package_timestamp = false
+
+ source_path = [
+ {
+ path = "${path.module}/../fixtures/runtimes/rust"
+ commands = [
+ # https://www.cargo-lambda.info/
+ "cargo lambda build --release --arm64",
+ "cd target/lambda/rust-app1",
+ ":zip",
+ ]
+ patterns = [
+ "!.*",
+ "bootstrap",
+ ]
+ }
+ ]
+}
+
+module "go_lambda_function" {
+ source = "../../"
+
+ function_name = "${random_pet.this.id}-go"
+
+ attach_cloudwatch_logs_policy = false
+ cloudwatch_logs_retention_in_days = 1
+
+ create_lambda_function_url = true
+
+ handler = "bootstrap"
+ runtime = "provided.al2023"
+ architectures = ["arm64"] # x86_64 (GOARCH=amd64); arm64 (GOARCH=arm64)
+
+ trigger_on_package_timestamp = false
+
+ source_path = [
+ {
+ path = "${path.module}/../fixtures/runtimes/go"
+ commands = [
+ "GOOS=linux GOARCH=arm64 CGO_ENABLED=0 go build -o bootstrap main.go",
+ ":zip",
+ ]
+ patterns = [
+ "!.*",
+ "bootstrap",
+ ]
+ }
+ ]
+}
+
+module "java21_lambda_function" {
+ source = "../../"
+
+ function_name = "${random_pet.this.id}-java21"
+
+ attach_cloudwatch_logs_policy = false
+ cloudwatch_logs_retention_in_days = 1
+
+ handler = "example.Handler"
+ runtime = "java21"
+ architectures = ["arm64"] # x86_64 or arm64
+ timeout = 30
+
+ trigger_on_package_timestamp = false
+
+ source_path = [
+ {
+ path = "${path.module}/../fixtures/runtimes/java21"
+ commands = [
+ "gradle build -i",
+ "cd build/output",
+ ":zip",
+ ]
+ }
+ ]
+}
+
+resource "random_pet" "this" {
+ length = 2
+}
diff --git a/examples/runtimes/outputs.tf b/examples/runtimes/outputs.tf
new file mode 100644
index 00000000..9c12c1d6
--- /dev/null
+++ b/examples/runtimes/outputs.tf
@@ -0,0 +1,24 @@
+output "rust_lambda_function_url" {
+ description = "The URL of the Lambda Function in Rust"
+ value = module.rust_lambda_function.lambda_function_url
+}
+
+output "go_lambda_function_url" {
+ description = "The URL of the Lambda Function in Go"
+ value = module.go_lambda_function.lambda_function_url
+}
+
+output "java21_lambda_function_arn" {
+ description = "The ARN of the Lambda Function in Java 21"
+ value = module.java21_lambda_function.lambda_function_arn
+}
+
+output "lambda_function_result" {
+ description = "The results of the Lambda Function calls"
+ value = { for k, v in data.aws_lambda_invocation.this : k => jsondecode(v.result) }
+}
+
+output "lambda_function_status_codes" {
+ description = "The status codes of the Lambda Function calls"
+ value = { for k, v in data.http.this : k => v.status_code }
+}
diff --git a/examples/runtimes/variables.tf b/examples/runtimes/variables.tf
new file mode 100644
index 00000000..e69de29b
diff --git a/examples/runtimes/versions.tf b/examples/runtimes/versions.tf
new file mode 100644
index 00000000..6c4e77fa
--- /dev/null
+++ b/examples/runtimes/versions.tf
@@ -0,0 +1,18 @@
+terraform {
+ required_version = ">= 1.2"
+
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = ">= 5.79"
+ }
+ random = {
+ source = "hashicorp/random"
+ version = ">= 3.0"
+ }
+ http = {
+ source = "hashicorp/http"
+ version = ">= 3.0"
+ }
+ }
+}
diff --git a/examples/simple-cicd/README.md b/examples/simple-cicd/README.md
index 93d1e4c5..2edb2e51 100644
--- a/examples/simple-cicd/README.md
+++ b/examples/simple-cicd/README.md
@@ -16,13 +16,13 @@ To run this example you need to execute:
Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources.
-
+
## Requirements
| Name | Version |
|------|---------|
| [terraform](#requirement\_terraform) | >= 1.0 |
-| [aws](#requirement\_aws) | >= 5.32 |
+| [aws](#requirement\_aws) | >= 5.79 |
| [random](#requirement\_random) | >= 2.0 |
## Providers
@@ -50,4 +50,4 @@ No inputs.
## Outputs
No outputs.
-
+
diff --git a/examples/simple-cicd/versions.tf b/examples/simple-cicd/versions.tf
index 55278d04..5cf868aa 100644
--- a/examples/simple-cicd/versions.tf
+++ b/examples/simple-cicd/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 5.32"
+ version = ">= 5.79"
}
random = {
source = "hashicorp/random"
diff --git a/examples/simple/README.md b/examples/simple/README.md
index 4c093861..bee54ded 100644
--- a/examples/simple/README.md
+++ b/examples/simple/README.md
@@ -14,13 +14,13 @@ $ terraform apply
Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources.
-
+
## Requirements
| Name | Version |
|------|---------|
| [terraform](#requirement\_terraform) | >= 1.0 |
-| [aws](#requirement\_aws) | >= 5.32 |
+| [aws](#requirement\_aws) | >= 5.79 |
| [random](#requirement\_random) | >= 2.0 |
## Providers
@@ -69,4 +69,4 @@ No inputs.
| [lambda\_role\_name](#output\_lambda\_role\_name) | The name of the IAM role created for the Lambda Function |
| [local\_filename](#output\_local\_filename) | The filename of zip archive deployed (if deployment was from local) |
| [s3\_object](#output\_s3\_object) | The map with S3 object data of zip archive deployed (if deployment was from S3) |
-
+
diff --git a/examples/simple/versions.tf b/examples/simple/versions.tf
index 55278d04..5cf868aa 100644
--- a/examples/simple/versions.tf
+++ b/examples/simple/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 5.32"
+ version = ">= 5.79"
}
random = {
source = "hashicorp/random"
diff --git a/examples/triggers/README.md b/examples/triggers/README.md
index 0bfc0a69..c6a6fde9 100644
--- a/examples/triggers/README.md
+++ b/examples/triggers/README.md
@@ -15,20 +15,20 @@ $ terraform apply
Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources.
-
+
## Requirements
| Name | Version |
|------|---------|
| [terraform](#requirement\_terraform) | >= 1.0 |
-| [aws](#requirement\_aws) | >= 5.32 |
+| [aws](#requirement\_aws) | >= 5.79 |
| [random](#requirement\_random) | >= 2.0 |
## Providers
| Name | Version |
|------|---------|
-| [aws](#provider\_aws) | >= 5.32 |
+| [aws](#provider\_aws) | >= 5.79 |
| [random](#provider\_random) | >= 2.0 |
## Modules
@@ -73,4 +73,4 @@ No inputs.
| [lambda\_role\_name](#output\_lambda\_role\_name) | The name of the IAM role created for the Lambda Function |
| [local\_filename](#output\_local\_filename) | The filename of zip archive deployed (if deployment was from local) |
| [s3\_object](#output\_s3\_object) | The map with S3 object data of zip archive deployed (if deployment was from S3) |
-
+
diff --git a/examples/triggers/versions.tf b/examples/triggers/versions.tf
index 55278d04..5cf868aa 100644
--- a/examples/triggers/versions.tf
+++ b/examples/triggers/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 5.32"
+ version = ">= 5.79"
}
random = {
source = "hashicorp/random"
diff --git a/examples/with-efs/README.md b/examples/with-efs/README.md
index f835445d..742816ba 100644
--- a/examples/with-efs/README.md
+++ b/examples/with-efs/README.md
@@ -15,20 +15,20 @@ $ terraform apply
Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources.
-
+
## Requirements
| Name | Version |
|------|---------|
| [terraform](#requirement\_terraform) | >= 1.0 |
-| [aws](#requirement\_aws) | >= 5.32 |
+| [aws](#requirement\_aws) | >= 5.79 |
| [random](#requirement\_random) | >= 2.0 |
## Providers
| Name | Version |
|------|---------|
-| [aws](#provider\_aws) | >= 5.32 |
+| [aws](#provider\_aws) | >= 5.79 |
| [random](#provider\_random) | >= 2.0 |
## Modules
@@ -75,4 +75,4 @@ No inputs.
| [lambda\_role\_name](#output\_lambda\_role\_name) | The name of the IAM role created for the Lambda Function |
| [local\_filename](#output\_local\_filename) | The filename of zip archive deployed (if deployment was from local) |
| [s3\_object](#output\_s3\_object) | The map with S3 object data of zip archive deployed (if deployment was from S3) |
-
+
diff --git a/examples/with-efs/versions.tf b/examples/with-efs/versions.tf
index 55278d04..5cf868aa 100644
--- a/examples/with-efs/versions.tf
+++ b/examples/with-efs/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 5.32"
+ version = ">= 5.79"
}
random = {
source = "hashicorp/random"
diff --git a/examples/with-vpc-s3-endpoint/README.md b/examples/with-vpc-s3-endpoint/README.md
index d84f6bdc..60a93661 100644
--- a/examples/with-vpc-s3-endpoint/README.md
+++ b/examples/with-vpc-s3-endpoint/README.md
@@ -16,20 +16,20 @@ $ terraform apply
Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources.
-
+
## Requirements
| Name | Version |
|------|---------|
| [terraform](#requirement\_terraform) | >= 1.0 |
-| [aws](#requirement\_aws) | >= 5.32 |
+| [aws](#requirement\_aws) | >= 5.79 |
| [random](#requirement\_random) | >= 3.4 |
## Providers
| Name | Version |
|------|---------|
-| [aws](#provider\_aws) | >= 5.32 |
+| [aws](#provider\_aws) | >= 5.79 |
| [random](#provider\_random) | >= 3.4 |
## Modules
@@ -81,4 +81,4 @@ No inputs.
| [lambda\_role\_name](#output\_lambda\_role\_name) | The name of the IAM role created for the Lambda Function |
| [local\_filename](#output\_local\_filename) | The filename of zip archive deployed (if deployment was from local) |
| [s3\_object](#output\_s3\_object) | The map with S3 object data of zip archive deployed (if deployment was from S3) |
-
+
diff --git a/examples/with-vpc-s3-endpoint/versions.tf b/examples/with-vpc-s3-endpoint/versions.tf
index fd604c66..7a4d860b 100644
--- a/examples/with-vpc-s3-endpoint/versions.tf
+++ b/examples/with-vpc-s3-endpoint/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 5.32"
+ version = ">= 5.79"
}
random = {
source = "hashicorp/random"
diff --git a/examples/with-vpc/README.md b/examples/with-vpc/README.md
index 28844bbd..a29f0bb3 100644
--- a/examples/with-vpc/README.md
+++ b/examples/with-vpc/README.md
@@ -16,13 +16,13 @@ $ terraform apply
Note that this example may create resources which cost money. Run `terraform destroy` when you don't need these resources.
-
+
## Requirements
| Name | Version |
|------|---------|
| [terraform](#requirement\_terraform) | >= 1.0 |
-| [aws](#requirement\_aws) | >= 5.32 |
+| [aws](#requirement\_aws) | >= 5.79 |
| [random](#requirement\_random) | >= 2.0 |
## Providers
@@ -72,4 +72,4 @@ No inputs.
| [lambda\_role\_name](#output\_lambda\_role\_name) | The name of the IAM role created for the Lambda Function |
| [local\_filename](#output\_local\_filename) | The filename of zip archive deployed (if deployment was from local) |
| [s3\_object](#output\_s3\_object) | The map with S3 object data of zip archive deployed (if deployment was from S3) |
-
+
diff --git a/examples/with-vpc/versions.tf b/examples/with-vpc/versions.tf
index 55278d04..5cf868aa 100644
--- a/examples/with-vpc/versions.tf
+++ b/examples/with-vpc/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 5.32"
+ version = ">= 5.79"
}
random = {
source = "hashicorp/random"
diff --git a/main.tf b/main.tf
index d30b16e2..110228bd 100644
--- a/main.tf
+++ b/main.tf
@@ -92,8 +92,9 @@ resource "aws_lambda_function" "this" {
dynamic "vpc_config" {
for_each = var.vpc_subnet_ids != null && var.vpc_security_group_ids != null ? [true] : []
content {
- security_group_ids = var.vpc_security_group_ids
- subnet_ids = var.vpc_subnet_ids
+ security_group_ids = var.vpc_security_group_ids
+ subnet_ids = var.vpc_subnet_ids
+ ipv6_allowed_for_dual_stack = var.ipv6_allowed_for_dual_stack
}
}
@@ -272,13 +273,14 @@ resource "aws_lambda_permission" "current_version_triggers" {
function_name = aws_lambda_function.this[0].function_name
qualifier = aws_lambda_function.this[0].version
- statement_id_prefix = try(each.value.statement_id, each.key)
- action = try(each.value.action, "lambda:InvokeFunction")
- principal = try(each.value.principal, format("%s.amazonaws.com", try(each.value.service, "")))
- principal_org_id = try(each.value.principal_org_id, null)
- source_arn = try(each.value.source_arn, null)
- source_account = try(each.value.source_account, null)
- event_source_token = try(each.value.event_source_token, null)
+ statement_id_prefix = try(each.value.statement_id, each.key)
+ action = try(each.value.action, "lambda:InvokeFunction")
+ principal = try(each.value.principal, format("%s.amazonaws.com", try(each.value.service, "")))
+ principal_org_id = try(each.value.principal_org_id, null)
+ source_arn = try(each.value.source_arn, null)
+ source_account = try(each.value.source_account, null)
+ event_source_token = try(each.value.event_source_token, null)
+ function_url_auth_type = try(each.value.function_url_auth_type, null)
lifecycle {
create_before_destroy = true
@@ -291,13 +293,14 @@ resource "aws_lambda_permission" "unqualified_alias_triggers" {
function_name = aws_lambda_function.this[0].function_name
- statement_id_prefix = try(each.value.statement_id, each.key)
- action = try(each.value.action, "lambda:InvokeFunction")
- principal = try(each.value.principal, format("%s.amazonaws.com", try(each.value.service, "")))
- principal_org_id = try(each.value.principal_org_id, null)
- source_arn = try(each.value.source_arn, null)
- source_account = try(each.value.source_account, null)
- event_source_token = try(each.value.event_source_token, null)
+ statement_id_prefix = try(each.value.statement_id, each.key)
+ action = try(each.value.action, "lambda:InvokeFunction")
+ principal = try(each.value.principal, format("%s.amazonaws.com", try(each.value.service, "")))
+ principal_org_id = try(each.value.principal_org_id, null)
+ source_arn = try(each.value.source_arn, null)
+ source_account = try(each.value.source_account, null)
+ event_source_token = try(each.value.event_source_token, null)
+ function_url_auth_type = try(each.value.function_url_auth_type, null)
lifecycle {
create_before_destroy = true
@@ -323,6 +326,7 @@ resource "aws_lambda_event_source_mapping" "this" {
topics = try(each.value.topics, null)
queues = try(each.value.queues, null)
function_response_types = try(each.value.function_response_types, null)
+ tumbling_window_in_seconds = try(each.value.tumbling_window_in_seconds, null)
dynamic "destination_config" {
for_each = try(each.value.destination_arn_on_failure, null) != null ? [true] : []
@@ -382,6 +386,34 @@ resource "aws_lambda_event_source_mapping" "this" {
}
}
}
+
+ dynamic "document_db_event_source_config" {
+ for_each = try(each.value.document_db_event_source_config, [])
+
+ content {
+ database_name = document_db_event_source_config.value.database_name
+ collection_name = try(document_db_event_source_config.value.collection_name, null)
+ full_document = try(document_db_event_source_config.value.full_document, null)
+ }
+ }
+
+ dynamic "metrics_config" {
+ for_each = try([each.value.metrics_config], [])
+
+ content {
+ metrics = metrics_config.value.metrics
+ }
+ }
+
+ dynamic "provisioned_poller_config" {
+ for_each = try([each.value.provisioned_poller_config], [])
+ content {
+ maximum_pollers = try(provisioned_poller_config.value.maximum_pollers, null)
+ minimum_pollers = try(provisioned_poller_config.value.minimum_pollers, null)
+ }
+ }
+
+ tags = merge(var.tags, try(each.value.tags, {}))
}
resource "aws_lambda_function_url" "this" {
@@ -408,6 +440,13 @@ resource "aws_lambda_function_url" "this" {
}
}
+resource "aws_lambda_function_recursion_config" "this" {
+ count = local.create && var.create_function && !var.create_layer && var.recursive_loop == "Allow" ? 1 : 0
+
+ function_name = aws_lambda_function.this[0].function_name
+ recursive_loop = var.recursive_loop
+}
+
# This resource contains the extra information required by SAM CLI to provide the testing capabilities
# to the TF application. The required data is where SAM CLI can find the Lambda function source code
# and what are the resources that contain the building logic.
diff --git a/modules/alias/README.md b/modules/alias/README.md
index 7744bbcc..4d1871d3 100644
--- a/modules/alias/README.md
+++ b/modules/alias/README.md
@@ -110,7 +110,7 @@ module "lambda" {
* [Alias](https://github.com/terraform-aws-modules/terraform-aws-lambda/tree/master/examples/alias) - Create Lambda function and aliases in various combinations with all supported features.
-
+
## Requirements
| Name | Version |
@@ -177,7 +177,7 @@ No modules.
| [lambda\_alias\_function\_version](#output\_lambda\_alias\_function\_version) | Lambda function version which the alias uses |
| [lambda\_alias\_invoke\_arn](#output\_lambda\_alias\_invoke\_arn) | The ARN to be used for invoking Lambda Function from API Gateway |
| [lambda\_alias\_name](#output\_lambda\_alias\_name) | The name of the Lambda Function Alias |
-
+
## Authors
diff --git a/modules/alias/main.tf b/modules/alias/main.tf
index b5f24f0f..e57079a2 100644
--- a/modules/alias/main.tf
+++ b/modules/alias/main.tf
@@ -155,6 +155,20 @@ resource "aws_lambda_event_source_mapping" "this" {
}
}
+ dynamic "self_managed_kafka_event_source_config" {
+ for_each = try(each.value.self_managed_kafka_event_source_config, [])
+ content {
+ consumer_group_id = try(self_managed_kafka_event_source_config.value.consumer_group_id, null)
+ }
+ }
+
+ dynamic "amazon_managed_kafka_event_source_config" {
+ for_each = try(each.value.amazon_managed_kafka_event_source_config, [])
+ content {
+ consumer_group_id = try(amazon_managed_kafka_event_source_config.value.consumer_group_id, null)
+ }
+ }
+
dynamic "source_access_configuration" {
for_each = try(each.value.source_access_configuration, [])
content {
diff --git a/modules/deploy/README.md b/modules/deploy/README.md
index 26f2b00f..6da1f6e8 100644
--- a/modules/deploy/README.md
+++ b/modules/deploy/README.md
@@ -95,7 +95,7 @@ module "lambda" {
* [Deploy](https://github.com/terraform-aws-modules/terraform-aws-lambda/tree/master/examples/deploy) - Creates Lambda Function, Alias, and all resources required to create deployments using AWS CodeDeploy.
-
+
## Requirements
| Name | Version |
@@ -151,10 +151,10 @@ No modules.
| [attach\_hooks\_policy](#input\_attach\_hooks\_policy) | Whether to attach Invoke policy to CodeDeploy role when before allow traffic or after allow traffic hooks are defined. | `bool` | `true` | no |
| [attach\_triggers\_policy](#input\_attach\_triggers\_policy) | Whether to attach SNS policy to CodeDeploy role when triggers are defined | `bool` | `false` | no |
| [auto\_rollback\_enabled](#input\_auto\_rollback\_enabled) | Indicates whether a defined automatic rollback configuration is currently enabled for this Deployment Group. | `bool` | `true` | no |
-| [auto\_rollback\_events](#input\_auto\_rollback\_events) | List of event types that trigger a rollback. Supported types are DEPLOYMENT\_FAILURE and DEPLOYMENT\_STOP\_ON\_ALARM. | `list(string)` | [
"DEPLOYMENT_STOP_ON_ALARM"
]
| no |
+| [auto\_rollback\_events](#input\_auto\_rollback\_events) | List of event types that trigger a rollback. Supported types are DEPLOYMENT\_FAILURE and DEPLOYMENT\_STOP\_ON\_ALARM. | `list(string)` | [
"DEPLOYMENT_STOP_ON_ALARM"
]
| no |
| [aws\_cli\_command](#input\_aws\_cli\_command) | Command to run as AWS CLI. May include extra arguments like region and profile. | `string` | `"aws"` | no |
| [before\_allow\_traffic\_hook\_arn](#input\_before\_allow\_traffic\_hook\_arn) | ARN of Lambda function to execute before allow traffic during deployment. This function should be named CodeDeployHook\_, to match the managed AWSCodeDeployForLambda policy, unless you're using a custom role | `string` | `""` | no |
-| [codedeploy\_principals](#input\_codedeploy\_principals) | List of CodeDeploy service principals to allow. The list can include global or regional endpoints. | `list(string)` | [
"codedeploy.amazonaws.com"
]
| no |
+| [codedeploy\_principals](#input\_codedeploy\_principals) | List of CodeDeploy service principals to allow. The list can include global or regional endpoints. | `list(string)` | [
"codedeploy.amazonaws.com"
]
| no |
| [codedeploy\_role\_name](#input\_codedeploy\_role\_name) | IAM role name to create or use by CodeDeploy | `string` | `""` | no |
| [create](#input\_create) | Controls whether resources should be created | `bool` | `true` | no |
| [create\_app](#input\_create\_app) | Whether to create new AWS CodeDeploy app | `bool` | `false` | no |
@@ -168,7 +168,7 @@ No modules.
| [force\_deploy](#input\_force\_deploy) | Force deployment every time (even when nothing changes) | `bool` | `false` | no |
| [function\_name](#input\_function\_name) | The name of the Lambda function to deploy | `string` | `""` | no |
| [get\_deployment\_sleep\_timer](#input\_get\_deployment\_sleep\_timer) | Adds additional sleep time to get-deployment command to avoid the service throttling | `number` | `5` | no |
-| [interpreter](#input\_interpreter) | List of interpreter arguments used to execute deploy script, first arg is path | `list(string)` | [
"/bin/bash",
"-c"
]
| no |
+| [interpreter](#input\_interpreter) | List of interpreter arguments used to execute deploy script, first arg is path | `list(string)` | [
"/bin/bash",
"-c"
]
| no |
| [run\_deployment](#input\_run\_deployment) | Run AWS CLI command to start the deployment | `bool` | `false` | no |
| [save\_deploy\_script](#input\_save\_deploy\_script) | Save deploy script locally | `bool` | `false` | no |
| [tags](#input\_tags) | A map of tags to assign to resources. | `map(string)` | `{}` | no |
@@ -191,7 +191,7 @@ No modules.
| [codedeploy\_iam\_role\_name](#output\_codedeploy\_iam\_role\_name) | Name of IAM role used by CodeDeploy |
| [deploy\_script](#output\_deploy\_script) | Path to a deployment script |
| [script](#output\_script) | Deployment script |
-
+
## Authors
diff --git a/modules/docker-build/README.md b/modules/docker-build/README.md
index d2c0aef8..bd223aaf 100644
--- a/modules/docker-build/README.md
+++ b/modules/docker-build/README.md
@@ -52,7 +52,7 @@ module "docker_image" {
* [Container Image](https://github.com/terraform-aws-modules/terraform-aws-lambda/tree/master/examples/container-image) - Creates Docker Image, ECR resository and deploys it Lambda Function.
-
+
## Requirements
| Name | Version |
@@ -116,7 +116,7 @@ No modules.
|------|-------------|
| [image\_id](#output\_image\_id) | The ID of the Docker image |
| [image\_uri](#output\_image\_uri) | The ECR image URI for deploying lambda |
-
+
## Authors
diff --git a/outputs.tf b/outputs.tf
index 6d53a66c..59197585 100644
--- a/outputs.tf
+++ b/outputs.tf
@@ -102,6 +102,11 @@ output "lambda_layer_version" {
}
# Lambda Event Source Mapping
+output "lambda_event_source_mapping_arn" {
+ description = "The event source mapping ARN"
+ value = { for k, v in aws_lambda_event_source_mapping.this : k => v.arn }
+}
+
output "lambda_event_source_mapping_function_arn" {
description = "The the ARN of the Lambda function the event source mapping is sending events to"
value = { for k, v in aws_lambda_event_source_mapping.this : k => v.function_arn }
diff --git a/package.py b/package.py
index 620be347..09bc23e6 100644
--- a/package.py
+++ b/package.py
@@ -272,12 +272,16 @@ def update_hash(hash_obj, file_root, file_path):
relative_path = os.path.join(file_root, file_path)
hash_obj.update(relative_path.encode())
- with open(relative_path, "rb") as open_file:
- while True:
- data = open_file.read(1024 * 8)
- if not data:
- break
- hash_obj.update(data)
+ try:
+ with open(relative_path, "rb") as open_file:
+ while True:
+ data = open_file.read(1024 * 8)
+ if not data:
+ break
+ hash_obj.update(data)
+ # ignore broken symlinks' content to avoid failing on the `terraform destroy` command
+ except FileNotFoundError:
+ pass
class ZipWriteStream:
@@ -568,6 +572,10 @@ def compile(self, patterns):
rules.append((None, r))
self._rules = rules
+ def reset(self):
+ self._log.debug("reset filter patterns")
+ self._rules = None
+
def filter(self, path, prefix=None):
path = os.path.normpath(path)
if prefix:
@@ -671,9 +679,13 @@ def plan(self, source_path, query):
source_paths = []
build_plan = []
+ build_step = []
- step = lambda *x: build_plan.append(x)
- hash = source_paths.append
+ def step(*x):
+ build_step.append(x)
+
+ def hash(path):
+ source_paths.append(path)
def pip_requirements_step(path, prefix=None, required=False, tmp_dir=None):
command = runtime
@@ -743,35 +755,30 @@ def commands_step(path, commands):
if path:
path = os.path.normpath(path)
+ step("set:workdir", path)
+
batch = []
for c in commands:
if isinstance(c, str):
if c.startswith(":zip"):
if path:
hash(path)
- else:
- # If path doesn't defined for a block with
- # commands it will be set to Terraform's
- # current working directory
- # NB: cwd may vary when using Terraform 0.14+ like:
- # `terraform -chdir=...`
- path = query.paths.cwd
if batch:
- step("sh", path, "\n".join(batch))
+ step("sh", "\n".join(batch))
batch.clear()
c = shlex.split(c)
- if len(c) == 3:
+ n = len(c)
+ if n == 3:
_, _path, prefix = c
prefix = prefix.strip()
- _path = os.path.normpath(os.path.join(path, _path))
+ _path = os.path.normpath(_path)
step("zip:embedded", _path, prefix)
- elif len(c) == 2:
- prefix = None
+ elif n == 2:
_, _path = c
- step("zip:embedded", _path, prefix)
- elif len(c) == 1:
- prefix = None
- step("zip:embedded", path, prefix)
+ _path = os.path.normpath(_path)
+ step("zip:embedded", _path)
+ elif n == 1:
+ step("zip:embedded")
else:
raise ValueError(
":zip invalid call signature, use: "
@@ -780,7 +787,7 @@ def commands_step(path, commands):
else:
batch.append(c)
if batch:
- step("sh", path, "\n".join(batch))
+ step("sh", "\n".join(batch))
batch.clear()
for claim in claims:
@@ -858,6 +865,7 @@ def commands_step(path, commands):
tmp_dir=claim.get("npm_tmp_dir"),
)
if path:
+ path = os.path.normpath(path)
step("zip", path, prefix)
if patterns:
# Take patterns into account when computing hash
@@ -868,111 +876,164 @@ def commands_step(path, commands):
hash(path_from_pattern)
else:
hash(path)
-
- if patterns:
- step("clear:filter")
else:
raise ValueError("Unsupported source_path item: {}".format(claim))
+ if build_step:
+ build_plan.append(build_step)
+ build_step = []
+
self._source_paths = source_paths
return build_plan
def execute(self, build_plan, zip_stream, query):
+ sh_log = logging.getLogger("sh")
+
+ tf_work_dir = os.getcwd()
+
zs = zip_stream
sh_work_dir = None
pf = None
- for action in build_plan:
- cmd = action[0]
- if cmd.startswith("zip"):
- ts = 0 if cmd == "zip:embedded" else None
- source_path, prefix = action[1:]
- if sh_work_dir:
- if source_path != sh_work_dir:
- if not os.path.isfile(source_path):
- source_path = sh_work_dir
- if os.path.isdir(source_path):
- if pf:
- self._zip_write_with_filter(
- zs, pf, source_path, prefix, timestamp=ts
- )
+ for step in build_plan:
+ # init step
+ sh_work_dir = tf_work_dir
+ if pf:
+ pf.reset()
+ pf = None
+
+ log.debug("STEPDIR: %s", sh_work_dir)
+
+ # execute step actions
+ for action in step:
+ cmd = action[0]
+ if cmd.startswith("zip"):
+ ts = 0 if cmd == "zip:embedded" else None
+
+ source_path, prefix = None, None
+ n = len(action)
+ if n == 2:
+ source_path = action[1]
+ elif n == 3:
+ source_path, prefix = action[1:]
+
+ if source_path:
+ if not os.path.isabs(source_path):
+ source_path = os.path.normpath(
+ os.path.join(sh_work_dir, source_path)
+ )
else:
- zs.write_dirs(source_path, prefix=prefix, timestamp=ts)
- else:
- zs.write_file(source_path, prefix=prefix, timestamp=ts)
- elif cmd == "pip":
- runtime, pip_requirements, prefix, tmp_dir = action[1:]
- with install_pip_requirements(query, pip_requirements, tmp_dir) as rd:
- if rd:
- if pf:
- self._zip_write_with_filter(zs, pf, rd, prefix, timestamp=0)
- else:
- # XXX: timestamp=0 - what actually do with it?
- zs.write_dirs(rd, prefix=prefix, timestamp=0)
- elif cmd == "poetry":
- (
- runtime,
- path,
- poetry_export_extra_args,
- prefix,
- ) = action[1:]
- log.info("poetry_export_extra_args: %s", poetry_export_extra_args)
- with install_poetry_dependencies(
- query, path, poetry_export_extra_args
- ) as rd:
- if rd:
- if pf:
- self._zip_write_with_filter(zs, pf, rd, prefix, timestamp=0)
- else:
- # XXX: timestamp=0 - what actually do with it?
- zs.write_dirs(rd, prefix=prefix, timestamp=0)
- elif cmd == "npm":
- runtime, npm_requirements, prefix, tmp_dir = action[1:]
- with install_npm_requirements(query, npm_requirements, tmp_dir) as rd:
- if rd:
+ source_path = sh_work_dir
+ if os.path.isdir(source_path):
if pf:
- self._zip_write_with_filter(zs, pf, rd, prefix, timestamp=0)
+ self._zip_write_with_filter(
+ zs, pf, source_path, prefix, timestamp=ts
+ )
else:
- # XXX: timestamp=0 - what actually do with it?
- zs.write_dirs(rd, prefix=prefix, timestamp=0)
- elif cmd == "sh":
- with tempfile.NamedTemporaryFile(mode="w+t", delete=True) as temp_file:
- path, script = action[1:]
- # NOTE: Execute `pwd` to determine the subprocess shell's working directory after having executed all other commands.
- script = f"{script} && pwd >{temp_file.name}"
- p = subprocess.Popen(
- script,
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- cwd=path,
- )
+ zs.write_dirs(source_path, prefix=prefix, timestamp=ts)
+ else:
+ zs.write_file(source_path, prefix=prefix, timestamp=ts)
+ elif cmd == "pip":
+ runtime, pip_requirements, prefix, tmp_dir = action[1:]
+ with install_pip_requirements(
+ query, pip_requirements, tmp_dir
+ ) as rd:
+ if rd:
+ if pf:
+ self._zip_write_with_filter(
+ zs, pf, rd, prefix, timestamp=0
+ )
+ else:
+ # XXX: timestamp=0 - what should we actually do with it?
+ zs.write_dirs(rd, prefix=prefix, timestamp=0)
+ elif cmd == "poetry":
+ (
+ runtime,
+ path,
+ poetry_export_extra_args,
+ prefix,
+ ) = action[1:]
+ log.info("poetry_export_extra_args: %s", poetry_export_extra_args)
+ with install_poetry_dependencies(
+ query, path, poetry_export_extra_args
+ ) as rd:
+ if rd:
+ if pf:
+ self._zip_write_with_filter(
+ zs, pf, rd, prefix, timestamp=0
+ )
+ else:
+ # XXX: timestamp=0 - what should we actually do with it?
+ zs.write_dirs(rd, prefix=prefix, timestamp=0)
+ elif cmd == "npm":
+ runtime, npm_requirements, prefix, tmp_dir = action[1:]
+ with install_npm_requirements(
+ query, npm_requirements, tmp_dir
+ ) as rd:
+ if rd:
+ if pf:
+ self._zip_write_with_filter(
+ zs, pf, rd, prefix, timestamp=0
+ )
+ else:
+ # XXX: timestamp=0 - what should we actually do with it?
+ zs.write_dirs(rd, prefix=prefix, timestamp=0)
+ elif cmd == "sh":
+ with tempfile.NamedTemporaryFile(
+ mode="w+t", delete=True
+ ) as temp_file:
+ script = action[1]
+
+ if log.isEnabledFor(DEBUG2):
+ log.debug("exec shell script ...")
+ for line in script.splitlines():
+ sh_log.debug(line)
+
+ script = "\n".join(
+ (
+ script,
+ # NOTE: Execute `pwd` to determine the subprocess shell's
+ # working directory after having executed all other commands.
+ "retcode=$?",
+ f"pwd >{temp_file.name}",
+ "exit $retcode",
+ )
+ )
- p.wait()
- temp_file.seek(0)
+ p = subprocess.Popen(
+ script,
+ shell=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=sh_work_dir,
+ )
- # NOTE: This var `sh_work_dir` is consumed in cmd == "zip" loop
- sh_work_dir = temp_file.read().strip()
+ call_stdout, call_stderr = p.communicate()
+ exit_code = p.returncode
+ log.debug("exit_code: %s", exit_code)
+ if exit_code != 0:
+ raise RuntimeError(
+ "Script did not run successfully, exit code {}: {} - {}".format(
+ exit_code,
+ call_stdout.decode("utf-8").strip(),
+ call_stderr.decode("utf-8").strip(),
+ )
+ )
- log.info("WD: %s", sh_work_dir)
+ temp_file.seek(0)
+ # NOTE: This var `sh_work_dir` is consumed in cmd == "zip" loop
+ sh_work_dir = temp_file.read().strip()
+ log.debug("WORKDIR: %s", sh_work_dir)
- call_stdout, call_stderr = p.communicate()
- exit_code = p.returncode
- log.info("exit_code: %s", exit_code)
- if exit_code != 0:
- raise RuntimeError(
- "Script did not run successfully, exit code {}: {} - {}".format(
- exit_code,
- call_stdout.decode("utf-8").strip(),
- call_stderr.decode("utf-8").strip(),
- )
- )
- elif cmd == "set:filter":
- patterns = action[1]
- pf = ZipContentFilter(args=self._args)
- pf.compile(patterns)
- elif cmd == "clear:filter":
- pf = None
+ elif cmd == "set:workdir":
+ path = action[1]
+ sh_work_dir = os.path.normpath(os.path.join(tf_work_dir, path))
+ log.debug("WORKDIR: %s", sh_work_dir)
+
+ elif cmd == "set:filter":
+ patterns = action[1]
+ pf = ZipContentFilter(args=self._args)
+ pf.compile(patterns)
@staticmethod
def _zip_write_with_filter(
@@ -1575,11 +1636,11 @@ def prepare_command(args):
content_hash = content_hash.hexdigest()
# Generate a unique filename based on the hash.
- filename = os.path.join(artifacts_dir, "{}.zip".format(content_hash))
+ zip_filename = os.path.join(artifacts_dir, "{}.zip".format(content_hash))
# Compute timestamp trigger
was_missing = False
- filename_path = os.path.join(os.getcwd(), filename)
+ filename_path = os.path.join(os.getcwd(), zip_filename)
if recreate_missing_package:
if os.path.exists(filename_path):
st = os.stat(filename_path)
@@ -1592,7 +1653,7 @@ def prepare_command(args):
# Replace variables in the build command with calculated values.
build_data = {
- "filename": filename,
+ "filename": zip_filename,
"runtime": runtime,
"artifacts_dir": artifacts_dir,
"build_plan": build_plan,
@@ -1612,7 +1673,7 @@ def prepare_command(args):
# Output the result to Terraform.
json.dump(
{
- "filename": filename,
+ "filename": zip_filename,
"build_plan": build_plan,
"build_plan_filename": build_plan_filename,
"timestamp": str(timestamp),
diff --git a/tests/test_package_toml.py b/tests/test_package_toml.py
index b60550a4..9eba3f4a 100644
--- a/tests/test_package_toml.py
+++ b/tests/test_package_toml.py
@@ -26,7 +26,7 @@ def test_build_manager_failing_command():
bpm = BuildPlanManager(args=Mock())
with raises(Exception):
bpm.execute(
- build_plan=[["sh", "/tmp", "NOTACOMMAND"]],
+ build_plan=[[["sh", "/tmp", "NOTACOMMAND"]]],
zip_stream=None,
query=None,
)
diff --git a/tests/test_zip_source.py b/tests/test_zip_source.py
index f46466e1..dd6750ca 100644
--- a/tests/test_zip_source.py
+++ b/tests/test_zip_source.py
@@ -12,8 +12,10 @@ def test_zip_source_path_sh_work_dir():
bpm.execute(
build_plan=[
- ["sh", ".", "cd $(mktemp -d)\n echo pip install"],
- ["zip:embedded", ".", "./python"],
+ [
+ ["sh", "cd $(mktemp -d)\n echo pip install"],
+ ["zip:embedded", ".", "./python"],
+ ]
],
zip_stream=zs,
query=None,
@@ -33,8 +35,10 @@ def test_zip_source_path():
bpm.execute(
build_plan=[
- ["sh", ".", "echo pip install"],
- ["zip:embedded", ".", "./python"],
+ [
+ ["sh", "echo pip install"],
+ ["zip:embedded", ".", "./python"],
+ ]
],
zip_stream=zs,
query=None,
diff --git a/variables.tf b/variables.tf
index 259b8374..2bb9f7a3 100644
--- a/variables.tf
+++ b/variables.tf
@@ -176,6 +176,12 @@ variable "vpc_security_group_ids" {
default = null
}
+variable "ipv6_allowed_for_dual_stack" {
+ description = "Allows outbound IPv6 traffic on VPC functions that are connected to dual-stack subnets"
+ type = bool
+ default = null
+}
+
variable "tags" {
description = "A map of tags to assign to resources."
type = map(string)
@@ -813,3 +819,13 @@ variable "logging_log_group" {
type = string
default = null
}
+
+############################################
+# Lambda Recursive Loop Settings
+############################################
+
+variable "recursive_loop" {
+ description = "Lambda function recursion configuration. Valid values are Allow or Terminate."
+ type = string
+ default = null
+}
diff --git a/versions.tf b/versions.tf
index 6c511ac4..bf462c65 100644
--- a/versions.tf
+++ b/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 5.32"
+ version = ">= 5.79"
}
external = {
source = "hashicorp/external"
diff --git a/wrappers/main.tf b/wrappers/main.tf
index d875508d..44bd8636 100644
--- a/wrappers/main.tf
+++ b/wrappers/main.tf
@@ -68,6 +68,7 @@ module "wrapper" {
image_config_working_directory = try(each.value.image_config_working_directory, var.defaults.image_config_working_directory, null)
image_uri = try(each.value.image_uri, var.defaults.image_uri, null)
invoke_mode = try(each.value.invoke_mode, var.defaults.invoke_mode, null)
+ ipv6_allowed_for_dual_stack = try(each.value.ipv6_allowed_for_dual_stack, var.defaults.ipv6_allowed_for_dual_stack, null)
kms_key_arn = try(each.value.kms_key_arn, var.defaults.kms_key_arn, null)
lambda_at_edge = try(each.value.lambda_at_edge, var.defaults.lambda_at_edge, false)
lambda_at_edge_logs_all_regions = try(each.value.lambda_at_edge_logs_all_regions, var.defaults.lambda_at_edge_logs_all_regions, true)
@@ -97,6 +98,7 @@ module "wrapper" {
publish = try(each.value.publish, var.defaults.publish, false)
putin_khuylo = try(each.value.putin_khuylo, var.defaults.putin_khuylo, true)
recreate_missing_package = try(each.value.recreate_missing_package, var.defaults.recreate_missing_package, true)
+ recursive_loop = try(each.value.recursive_loop, var.defaults.recursive_loop, null)
replace_security_groups_on_destroy = try(each.value.replace_security_groups_on_destroy, var.defaults.replace_security_groups_on_destroy, null)
replacement_security_group_ids = try(each.value.replacement_security_group_ids, var.defaults.replacement_security_group_ids, null)
reserved_concurrent_executions = try(each.value.reserved_concurrent_executions, var.defaults.reserved_concurrent_executions, -1)
diff --git a/wrappers/versions.tf b/wrappers/versions.tf
index 6c511ac4..bf462c65 100644
--- a/wrappers/versions.tf
+++ b/wrappers/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
- version = ">= 5.32"
+ version = ">= 5.79"
}
external = {
source = "hashicorp/external"