Updated test code
Suprajaa-27 committed Dec 21, 2023
1 parent 2c32a6c commit 438eeef
Showing 6 changed files with 178 additions and 45 deletions.
63 changes: 29 additions & 34 deletions src/lambda_function.py
@@ -1,54 +1,49 @@
 import json
 import boto3


 def lambda_handler(event, context):
-    bucket_name = 's3-trigger-lambda-function-test12345'
-    file_name = 'employees.json'
-
-    s3 = boto3.client('s3')
-
     try:
+        # Extract bucket name and object key from the S3 event
+        bucket_name = event["Records"][0]["s3"]["bucket"]["name"]
+        file_name = event["Records"][0]["s3"]["object"]["key"]
+
+        s3 = boto3.client("s3")
+
         response = s3.get_object(Bucket=bucket_name, Key=file_name)
-        content = response['Body'].read().decode('utf-8')
+        content = response["Body"].read().decode("utf-8")
         employees_data = json.loads(content)

         department_totals = {}

         for employee in employees_data:
-            department = employee.get('department')
-            salary = employee.get('salary')
-
-
-            if salary is None or not isinstance(salary, (int, float)) or department is None:
+            department = employee.get("department")
+            salary = employee.get("salary")
+
+            if (
+                salary is None
+                or not isinstance(salary, (int, float))
+                or department is None
+                or not isinstance(department,(str))
+            ):
                 raise ValueError("Invalid/missing data for department or salary..")

             if department not in department_totals:
                 department_totals[department] = salary
             else:
                 department_totals[department] += salary

-        sorted_departments = sorted(department_totals.items(), key=lambda x: x[1], reverse=True)
-
-        for department, total_salary in sorted_departments:
-            print(f"Department: {department}, Total Salary: {total_salary}")
-
-        return {
-            'statusCode': 200,
-            'body': json.dumps(department_totals)
-        }
-
+        # Sort the department totals by total salary
+        sorted_departments = dict(sorted(department_totals.items(), key=lambda x: x[1], reverse=True))
+
+        return {"statusCode": 200, "body": json.dumps(sorted_departments)}

     except ValueError as ve:
         error_message = f"Value error occurred: {str(ve)}"
         print(error_message)
-        return {
-            'statusCode': 400,
-            'body': json.dumps({'error': error_message})
-        }
+        return {"statusCode": 400, "body": json.dumps({"error": error_message})}

     except Exception as e:
         error_message = f"An unexpected error occurred: {str(e)}"
         print(error_message)
-        return {
-            'statusCode': 500,
-            'body': json.dumps({'error': error_message})
-        }
+        return {"statusCode": 500, "body": json.dumps({"error": error_message})}
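For reference, here is a minimal local sketch of the aggregation the updated handler performs once the uploaded object has been read. The employees_data list is a hypothetical example payload (not part of this commit); it simply reproduces the sum-per-department and descending sort that the new return value contains.

    import json

    # Hypothetical contents of an uploaded employees.json (illustrative only)
    employees_data = [
        {"department": "HR", "salary": 50000},
        {"department": "Finance", "salary": 60000},
        {"department": "HR", "salary": 55000},
    ]

    # Same aggregation the handler performs after reading the object from S3
    department_totals = {}
    for employee in employees_data:
        department_totals[employee["department"]] = (
            department_totals.get(employee["department"], 0) + employee["salary"]
        )

    # Sort by total salary, highest first, as in the new response body
    sorted_departments = dict(sorted(department_totals.items(), key=lambda x: x[1], reverse=True))
    print(json.dumps(sorted_departments))  # {"HR": 105000, "Finance": 60000}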
3 changes: 1 addition & 2 deletions terraform/iam.tf
@@ -61,8 +61,7 @@ resource "aws_iam_policy" "s3_access_policy" {
       "Effect": "Allow",
       "Action": [
         "s3:GetObject",
-        "s3:ListBucket",
-        "s3:PutObject"
+        "s3:ListBucket"
       ],
       "Resource": [
         "${aws_s3_bucket.s3_bucket.arn}",
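To confirm that dropping s3:PutObject actually narrows what the Lambda role can do after apply, one option is IAM's policy simulator via boto3. This is a hedged sketch: the role and object ARNs are placeholders, not values taken from the repo.

    import boto3

    iam = boto3.client("iam")

    # Placeholder ARNs -- substitute the real Lambda role and bucket object ARNs
    role_arn = "arn:aws:iam::123456789012:role/aws_lambda_role"
    object_arn = "arn:aws:s3:::s3-trigger-lambda-function-test12345/employees.json"

    response = iam.simulate_principal_policy(
        PolicySourceArn=role_arn,
        ActionNames=["s3:GetObject", "s3:PutObject"],
        ResourceArns=[object_arn],
    )

    for result in response["EvaluationResults"]:
        # Expect GetObject to be allowed and PutObject to be implicitly denied
        print(result["EvalActionName"], result["EvalDecision"])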
4 changes: 2 additions & 2 deletions terraform/log_group.tf
@@ -1,6 +1,6 @@
 resource "aws_cloudwatch_log_group" "app_log_group" {
-  name = "/aws/lambda/${var.lambda_function_name}"
-  retention_in_days = 1
+  name = "lambda_app_logs"
+  retention_in_days = 5
   lifecycle {
     prevent_destroy = false
   }
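A quick way to double-check the renamed log group and its 5-day retention after terraform apply is the CloudWatch Logs API; a small sketch, assuming credentials that allow logs:DescribeLogGroups:

    import boto3

    logs = boto3.client("logs")

    # Look up the renamed log group and print its retention setting
    response = logs.describe_log_groups(logGroupNamePrefix="lambda_app_logs")
    for group in response["logGroups"]:
        print(group["logGroupName"], group.get("retentionInDays"))  # expect 5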
9 changes: 3 additions & 6 deletions terraform/main.tf
@@ -6,17 +6,14 @@ resource "aws_lambda_function" "s3_trigger_lambda_function" {
   runtime = var.runtime
   role = aws_iam_role.aws_lambda_role.arn
   filename = "${path.module}/lambda_function.zip"
-  tracing_config {
-    mode = "Active"
-  }
-
-  # Attach the custom log group ARN
   environment {
     variables = {
-      CW_LOG_GROUP = aws_cloudwatch_log_group.app_log_group.arn
+      LOG_GROUP_NAME = aws_cloudwatch_log_group.app_log_group.name
     }
   }
 }


 #Resource to configure Lambda as an event trigger for S3 bucket.
 resource "aws_s3_bucket_notification" "bucket_notification" {
   bucket = aws_s3_bucket.s3_bucket.id
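The environment block now exposes the log group's name rather than its ARN. If the handler ever needs that value, it would be read from the environment roughly like this (a sketch only; no such code is part of this commit):

    import os

    # LOG_GROUP_NAME is injected by the Terraform environment block above
    log_group_name = os.environ.get("LOG_GROUP_NAME", "lambda_app_logs")
    print(f"Configured log group: {log_group_name}")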
2 changes: 1 addition & 1 deletion terraform/variables.tf
@@ -43,4 +43,4 @@ variable "bucket_name" {
   type = string
   description = "S3 bucket to trigger lambda function when a json file is created"
   default = "s3-trigger-lambda-function-test12345"
-}
+}
142 changes: 142 additions & 0 deletions tests/test.py
@@ -0,0 +1,142 @@
import json
import unittest
from unittest.mock import patch, MagicMock
import sys
sys.path.append('../src')  # To add 'src' directory to the module search path
# sys.path.insert(0, '../src')  # Adjust the path based on your project structure


from lambda_function import lambda_handler


class TestLambdaFunction(unittest.TestCase):

    @patch('lambda_function.boto3')
    def test_lambda_handler_valid_data(self, mock_boto3):
        # Define a sample event with valid data
        event = {
            "Records": [
                {
                    "s3": {
                        "bucket": {
                            "name": "test-bucket"
                        },
                        "object": {
                            "key": "test-file.json"
                        }
                    }
                }
            ]
        }

        # Mock the S3 client and its get_object method to return sample employee data
        mock_s3 = MagicMock()
        mock_boto3.client.return_value = mock_s3
        mock_s3.get_object.return_value = {
            "Body": MagicMock(read=MagicMock(return_value=json.dumps([
                {"department": "HR", "salary": 50000},
                {"department": "Finance", "salary": 60000},
                {"department": "HR", "salary": 55000}
            ]).encode('utf-8')))
        }

        # Execute the lambda handler function with the mocked S3 client
        result = lambda_handler(event, None)

        # Define the expected result based on the mocked employee data
        expected_result = {
            "Finance": 60000,
            "HR": 105000
        }

        # Assert that the result matches the expected department totals
        self.assertEqual(result['statusCode'], 200)
        self.assertEqual(json.loads(result['body']), expected_result)

    @patch('lambda_function.boto3')
    def test_lambda_handler_salary_not_numeric_or_none(self, mock_boto3):
        # Define a sample event with missing or invalid salary values
        event = {
            "Records": [
                {
                    "s3": {
                        "bucket": {
                            "name": "test-bucket"
                        },
                        "object": {
                            "key": "test-file.json"
                        }
                    }
                }
            ]
        }

        # Mock the S3 client and its get_object method to return sample employee data
        mock_s3 = MagicMock()
        mock_boto3.client.return_value = mock_s3
        mock_s3.get_object.return_value = {
            "Body": MagicMock(read=MagicMock(return_value=json.dumps([
                {"department": "HR", "salary": "invalid"},
                {"department": "Finance", "salary": None},
                {"department": "HR", "salary": "60000"}
            ]).encode('utf-8')))
        }

        # Execute the lambda handler function with the mocked S3 client
        result = lambda_handler(event, None)

        # Define the expected result when salary is not numeric or None
        expected_result = {
            "error": "Value error occurred: Invalid/missing data for department or salary.."
        }

        # Assert that the result contains the expected error message
        self.assertEqual(result['statusCode'], 400)
        self.assertEqual(json.loads(result['body']), expected_result)

    @patch('lambda_function.boto3')
    def test_lambda_handler_department_invalid_missing_values(self, mock_boto3):
        # Define a sample event with missing or invalid department values
        event = {
            "Records": [
                {
                    "s3": {
                        "bucket": {
                            "name": "test-bucket"
                        },
                        "object": {
                            "key": "test-file.json"
                        }
                    }
                }
            ]
        }

        # Mock the S3 client and its get_object method to return sample employee data
        mock_s3 = MagicMock()
        mock_boto3.client.return_value = mock_s3
        mock_s3.get_object.return_value = {
            "Body": MagicMock(read=MagicMock(return_value=json.dumps([
                {"department": None, "salary": 50000},
                {"department": "HR", "salary": None},
                {"department": 32873, "salary": 55000}
            ]).encode('utf-8')))
        }

        # Execute the lambda handler function with the mocked S3 client
        result = lambda_handler(event, None)

        # Define the expected result when department values are None or non string
        expected_result = {
            "error": "Value error occurred: Invalid/missing data for department or salary.."
        }

        # Assert that the result contains the expected error message
        self.assertEqual(result['statusCode'], 400)
        self.assertEqual(json.loads(result['body']), expected_result)

    # ... (Previous test case remains unchanged)


if __name__ == '__main__':
    unittest.main()
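Because of the relative sys.path.append('../src') at the top of the file, lambda_function only resolves when the tests are launched from the tests/ directory. A small runner sketch under that assumption (the run_tests.py file name is hypothetical and not part of this commit):

    # Hypothetical tests/run_tests.py -- run with `cd tests && python run_tests.py`
    import unittest

    if __name__ == "__main__":
        # Discover test.py in the current (tests/) directory and run it verbosely
        suite = unittest.TestLoader().discover(start_dir=".", pattern="test.py")
        unittest.TextTestRunner(verbosity=2).run(suite)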
