diff --git a/pom.xml b/pom.xml
index 2d8e109..0766dd1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -31,6 +31,11 @@
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-log4j2</artifactId>
         </dependency>
+        <dependency>
+            <groupId>software.amazon.awssdk</groupId>
+            <artifactId>s3</artifactId>
+            <version>2.25.70</version>
+        </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-test</artifactId>
diff --git a/src/main/java/com/app/config/AwsS3Config.java b/src/main/java/com/app/config/AwsS3Config.java
new file mode 100644
index 0000000..952544c
--- /dev/null
+++ b/src/main/java/com/app/config/AwsS3Config.java
@@ -0,0 +1,45 @@
+package com.app.config;
+
+
+import lombok.extern.log4j.Log4j2;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+
+@Configuration
+@ConditionalOnProperty(name = "aws.s3.enabled", havingValue = "true")
+@Log4j2
+public class AwsS3Config {
+
+ @Value("${aws.s3.region:us-east-1}")
+ private String awsRegion;
+ @Value("${aws.s3.accessKeyId:us-east-1}")
+ private String accessKeyId;
+ @Value("${aws.s3.secretAccessKey}")
+ private String secretAccessKey;
+
+ @Bean
+ public S3Client s3Client() {
+ try {
+ log.info("Trying to S3Client create.");
+ S3Client s3Client = S3Client.builder()
+ .region(Region.of(awsRegion))
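+ // Credentials come from application properties via a static provider. As an alternative (an
+ // assumption, not part of this change), DefaultCredentialsProvider.create() would let the SDK resolve
+ // credentials from the environment, shared config files, or an instance profile.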
+ .credentialsProvider(StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKeyId, secretAccessKey)))
+ .overrideConfiguration(ClientOverrideConfiguration.builder()
+ .retryPolicy(r -> r.numRetries(3))
+ .build())
+ .build();
+ log.info("S3Client created successfully.");
+ return s3Client;
+ } catch (Exception e) {
+ log.error("Failed to create S3Client.", e);
+ throw e;
+ }
+ }
+}
diff --git a/src/main/java/com/app/controller/S3Controller.java b/src/main/java/com/app/controller/S3Controller.java
new file mode 100644
index 0000000..f58a4b4
--- /dev/null
+++ b/src/main/java/com/app/controller/S3Controller.java
@@ -0,0 +1,88 @@
+package com.app.controller;
+
+import com.app.service.S3Service;
+import lombok.RequiredArgsConstructor;
+import org.springframework.core.io.InputStreamResource;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+import org.springframework.web.multipart.MultipartFile;
+import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+import java.util.Map;
+import java.util.zip.ZipOutputStream;
+
+@RestController
+@RequiredArgsConstructor
+@RequestMapping("/api/files")
+public class S3Controller {
+
+ private final S3Service s3Service;
+
+ @PostMapping("/upload")
+ public ResponseEntity<String> uploadFile(@RequestPart("file") MultipartFile file,
+ @RequestParam(value = "isReadPublicly", defaultValue = "false") boolean isReadPublicly) {
+ boolean isUploaded = s3Service.uploadFile(file, isReadPublicly);
+ if (isUploaded) {
+ return ResponseEntity.ok("File uploaded successfully: " + file.getOriginalFilename());
+ } else {
+ return ResponseEntity.status(500).body("Failed to upload file: " + file.getOriginalFilename());
+ }
+ }
+
+ @GetMapping("/download/{key}")
+ public ResponseEntity<InputStreamResource> downloadFile(@PathVariable String key) {
+ InputStream fileStream = s3Service.downloadFileAsStream(key);
+ InputStreamResource resource = new InputStreamResource(fileStream);
+ return ResponseEntity.ok()
+ .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + key + "\"")
+ .contentType(MediaType.APPLICATION_OCTET_STREAM)
+ .body(resource);
+ }
+
+ @GetMapping("/create-bucket")
+ public ResponseEntity<String> createBucket(@RequestParam String bucketName) {
+ return ResponseEntity.ok(s3Service.createBucket(bucketName));
+ }
+
+ @GetMapping("/bucket-list")
+ public ResponseEntity<List<String>> getBucketList() {
+ return ResponseEntity.ok(s3Service.getBucketList());
+ }
+
+ @GetMapping("/list-buckets-with-regions")
+ public Map<String, String> listBucketsWithRegions() {
+ return s3Service.listBucketsWithRegions();
+ }
+
+ @GetMapping("/download-all-files-zip")
+ public ResponseEntity<StreamingResponseBody> downloadAllFilesAsZip(@RequestParam String bucketName) {
+
+ // Streaming response to handle large files efficiently
+ StreamingResponseBody responseBody = outputStream -> {
+ try (ZipOutputStream zos = new ZipOutputStream(outputStream)) {
+ s3Service.streamAllFilesAsZip(bucketName, zos);
+ } catch (IOException e) {
+ throw new RuntimeException("Error while streaming files to output stream", e);
+ }
+ };
+
+ // Set headers for ZIP file download
+ HttpHeaders headers = new HttpHeaders();
+ headers.add("Content-Disposition", "attachment; filename=all-files.zip");
+ headers.add("Content-Type", "application/zip");
+
+ return new ResponseEntity<>(responseBody, headers, HttpStatus.OK);
+ }
+
+ @GetMapping("/move-files")
+ public String moveFiles(@RequestParam String sourceBucketName, @RequestParam String destinationBucketName) {
+ s3Service.moveFiles(sourceBucketName, destinationBucketName);
+ return "Files are being moved from " + sourceBucketName + " to " + destinationBucketName;
+ }
+}
diff --git a/src/main/java/com/app/service/S3Service.java b/src/main/java/com/app/service/S3Service.java
new file mode 100644
index 0000000..2a90f3b
--- /dev/null
+++ b/src/main/java/com/app/service/S3Service.java
@@ -0,0 +1,25 @@
+package com.app.service;
+
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.InputStream;
+import java.util.List;
+import java.util.Map;
+import java.util.zip.ZipOutputStream;
+
+public interface S3Service {
+
+ boolean uploadFile(MultipartFile file, boolean isReadPublicly);
+
+ InputStream downloadFileAsStream(String key);
+
+ String createBucket(String bucketName);
+
+ List<String> getBucketList();
+
+ Map<String, String> listBucketsWithRegions();
+
+ void streamAllFilesAsZip(String bucketName, ZipOutputStream zos);
+
+ void moveFiles(String sourceBucketName, String destinationBucketName);
+}
diff --git a/src/main/java/com/app/service/S3ServiceImpl.java b/src/main/java/com/app/service/S3ServiceImpl.java
new file mode 100644
index 0000000..609d37c
--- /dev/null
+++ b/src/main/java/com/app/service/S3ServiceImpl.java
@@ -0,0 +1,254 @@
+package com.app.service;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.log4j.Log4j2;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+import software.amazon.awssdk.core.ResponseBytes;
+import software.amazon.awssdk.core.ResponseInputStream;
+import software.amazon.awssdk.core.exception.SdkException;
+import software.amazon.awssdk.core.sync.RequestBody;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.*;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+@Service
+@Log4j2
+@RequiredArgsConstructor
+public class S3ServiceImpl implements S3Service {
+
+ private final S3Client s3Client;
+ private static final int THREAD_POOL_SIZE = 10; // Number of threads for concurrent execution
+
+
+ @Value("${aws.s3.bucket-name}")
+ private String bucketName;
+
+
+ @Override
+ public boolean uploadFile(MultipartFile file, boolean isReadPublicly) {
+ log.info("Started uploading file '{}' to S3 Bucket '{}'", file.getOriginalFilename(), bucketName);
+ PutObjectRequest putObjectRequest;
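+ // The "public-read" canned ACL below makes the object world-readable; note (assumption) the target
+ // bucket must have ACLs enabled, otherwise S3 rejects the request.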
+ if (isReadPublicly) {
+ putObjectRequest = PutObjectRequest.builder()
+ .bucket(bucketName)
+ .key(file.getOriginalFilename()).acl("public-read")
+ .build();
+ } else {
+ putObjectRequest = PutObjectRequest.builder()
+ .bucket(bucketName)
+ .key(file.getOriginalFilename())
+ .build();
+ }
+ try {
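+ // file.getBytes() loads the whole upload into memory. For large files, a streaming body such as
+ // RequestBody.fromInputStream(file.getInputStream(), file.getSize()) would avoid that (a sketch,
+ // not part of this change).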
+ s3Client.putObject(putObjectRequest, RequestBody.fromBytes(file.getBytes()));
+ log.info("Successfully uploaded file to S3. Bucket: {}, Key: {}", bucketName, file.getOriginalFilename());
+ return true;
+ } catch (Exception e) {
+ log.error("Failed to upload file to S3. Bucket: {}, Key: {}", bucketName, file.getOriginalFilename(), e);
+ return false;
+ }
+ }
+
+ @Override
+ public InputStream downloadFileAsStream(String key) {
+ try {
+ GetObjectRequest getObjectRequest = GetObjectRequest.builder()
+ .bucket(bucketName)
+ .key(key)
+ .build();
+
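+ // getObjectAsBytes buffers the entire object in memory. For large objects, s3Client.getObject(getObjectRequest)
+ // returns a streaming ResponseInputStream that could be handed back directly instead (a sketch, not part of this change).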
+ ResponseBytes<GetObjectResponse> getObjectResponse = s3Client.getObjectAsBytes(getObjectRequest);
+ if (getObjectResponse == null) {
+ log.warn("Failed to get file from S3 bucket: Response is null");
+ return new ByteArrayInputStream(new byte[0]);
+ }
+
+ log.info("Successfully getting file in bytes from S3 bucket.");
+ byte[] fileBytes = getObjectResponse.asByteArray();
+ return new ByteArrayInputStream(fileBytes);
+
+ } catch (S3Exception e) {
+ log.error("Failed to fetch object from S3 Bucket: {}, Key: {}", bucketName, key, e);
+ throw e;
+ } catch (SdkException e) {
+ log.error("Error while downloading file from S3 Bucket: {}, Key: {}", bucketName, key, e);
+ throw e;
+ }
+ }
+
+ @Override
+ public String createBucket(String bucketName) {
+ try {
+ CreateBucketRequest createBucketRequest = CreateBucketRequest.builder()
+ .bucket(bucketName)
+ .build();
+ CreateBucketResponse createBucketResponse = s3Client.createBucket(createBucketRequest);
+ return "Bucket created successfully: " + createBucketResponse.location();
+ } catch (S3Exception e) {
+ throw new RuntimeException("Failed to create bucket: " + e.awsErrorDetails().errorMessage(), e);
+ }
+ }
+
+ @Override
+ public List<String> getBucketList() {
+ try {
+ ListBucketsResponse listBucketsResponse = s3Client.listBuckets();
+ return listBucketsResponse.buckets().stream()
+ .map(Bucket::name)
+ .toList();
+ } catch (S3Exception e) {
+ throw new RuntimeException("Failed to list buckets: " + e.awsErrorDetails().errorMessage(), e);
+ }
+ }
+
+ @Override
+ public Map<String, String> listBucketsWithRegions() {
+ try {
+ ListBucketsResponse listBucketsResponse = s3Client.listBuckets();
+
+ // Create a map to store bucket names with their respective regions
+ Map<String, String> bucketRegions = new HashMap<>();
+
+ for (var bucket : listBucketsResponse.buckets()) {
+ String bucketRegion = getBucketRegion(bucket.name());
+ bucketRegions.put(bucket.name(), bucketRegion);
+ }
+
+ return bucketRegions;
+
+ } catch (S3Exception e) {
+ throw new RuntimeException("Failed to list buckets: " + e.awsErrorDetails().errorMessage(), e);
+ }
+ }
+
+ private String getBucketRegion(String bucketName) {
+ try {
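+ // GetBucketLocation needs permission on each bucket; buckets we cannot query fall through to "Unknown" below.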
+ GetBucketLocationRequest locationRequest = GetBucketLocationRequest.builder()
+ .bucket(bucketName)
+ .build();
+
+ GetBucketLocationResponse locationResponse = s3Client.getBucketLocation(locationRequest);
+
+ // Translate the bucket location constraint to a region name.
+ // Buckets in us-east-1 report a null or empty constraint, so fall back to US_EAST_1.
+ String locationConstraint = locationResponse.locationConstraintAsString();
+ Region region = (locationConstraint == null || locationConstraint.isEmpty())
+ ? Region.US_EAST_1 : Region.of(locationConstraint);
+
+ return region.id();
+ } catch (S3Exception e) {
+ return "Unknown"; // Handle the case where the region is not accessible or available
+ }
+ }
+
+ @Override
+ public void streamAllFilesAsZip(String bucketName, ZipOutputStream zos) {
+ ListObjectsV2Request listObjectsRequest = ListObjectsV2Request.builder()
+ .bucket(bucketName)
+ .build();
+
+ ListObjectsV2Response listObjectsResponse;
+ do {
+ listObjectsResponse = s3Client.listObjectsV2(listObjectsRequest);
+ List<S3Object> objects = listObjectsResponse.contents();
+
+ for (S3Object object : objects) {
+ addFileToZipStream(bucketName, object.key(), zos);
+ }
+
+ // Advance to the next page; without updating the continuation token a truncated listing would loop forever
+ listObjectsRequest = listObjectsRequest.toBuilder()
+ .continuationToken(listObjectsResponse.nextContinuationToken())
+ .build();
+ } while (listObjectsResponse.isTruncated());
+ }
+
+ private void addFileToZipStream(String bucketName, String keyName, ZipOutputStream zos) {
+ GetObjectRequest getObjectRequest = GetObjectRequest.builder()
+ .bucket(bucketName)
+ .key(keyName)
+ .build();
+
+ try (ResponseInputStream<GetObjectResponse> s3ObjectStream = s3Client.getObject(getObjectRequest)) {
+ zos.putNextEntry(new ZipEntry(keyName));
+
+ byte[] buffer = new byte[1024];
+ int length;
+ while ((length = s3ObjectStream.read(buffer)) != -1) {
+ zos.write(buffer, 0, length);
+ }
+
+ zos.closeEntry();
+ } catch (IOException | S3Exception e) {
+ throw new RuntimeException("Failed to add file to ZIP: " + keyName, e);
+ }
+ }
+
+ @Override
+ public void moveFiles(String sourceBucketName, String destinationBucketName) {
+ ExecutorService executorService = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
+ ListObjectsV2Request listObjectsRequest = ListObjectsV2Request.builder()
+ .bucket(sourceBucketName)
+ .build();
+
+ try {
+ ListObjectsV2Response listObjectsResponse;
+ do {
+ listObjectsResponse = s3Client.listObjectsV2(listObjectsRequest);
+ List<S3Object> objects = listObjectsResponse.contents();
+
+ for (S3Object object : objects) {
+ String keyName = object.key();
+ // Submit the copy and delete tasks to be executed concurrently
+ executorService.submit(() -> copyAndDeleteObject(sourceBucketName, destinationBucketName, keyName));
+ }
+
+ // Advance to the next page of the listing before re-checking isTruncated()
+ listObjectsRequest = listObjectsRequest.toBuilder()
+ .continuationToken(listObjectsResponse.nextContinuationToken())
+ .build();
+ } while (listObjectsResponse.isTruncated());
+
+ } catch (S3Exception e) {
+ log.error("Failed to list objects from bucket: {} - {}", sourceBucketName, e.getMessage());
+ } finally {
+ executorService.shutdown();
+ try {
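+ // Give in-flight copy/delete tasks up to 60 seconds to complete before forcing shutdown.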
+ if (!executorService.awaitTermination(60, TimeUnit.SECONDS)) {
+ executorService.shutdownNow();
+ }
+ } catch (InterruptedException e) {
+ executorService.shutdownNow();
+ Thread.currentThread().interrupt();
+ }
+ }
+ }
+
+ private void copyAndDeleteObject(String sourceBucketName, String destinationBucketName, String keyName) {
+ try {
+ // Copy file to the destination bucket
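+ // Note: a single CopyObject call handles objects up to 5 GB; larger objects would require a multipart copy.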
+ CopyObjectRequest copyRequest = CopyObjectRequest.builder()
+ .sourceBucket(sourceBucketName)
+ .sourceKey(keyName)
+ .destinationBucket(destinationBucketName)
+ .destinationKey(keyName)
+ .build();
+ s3Client.copyObject(copyRequest);
+ log.info("Copied file: {} from {} to {}", keyName, sourceBucketName, destinationBucketName);
+
+ // Delete file from the source bucket
+ DeleteObjectRequest deleteRequest = DeleteObjectRequest.builder()
+ .bucket(sourceBucketName)
+ .key(keyName)
+ .build();
+ s3Client.deleteObject(deleteRequest);
+ log.info("Deleted file: {} from {}", keyName, sourceBucketName);
+
+ } catch (S3Exception e) {
+ log.error("Error while moving file: {} - {}", keyName, e.getMessage());
+ }
+ }
+}
diff --git a/src/main/resources/application-local.yml b/src/main/resources/application-local.yml
index d2f8b2d..11b1e06 100644
--- a/src/main/resources/application-local.yml
+++ b/src/main/resources/application-local.yml
@@ -2,3 +2,10 @@ app:
logs:
path: C:/${spring.application.name}/logs
+aws:
+ s3:
+ enabled: true
+ region: us-east-1
+ accessKeyId: <your-access-key-id>
+ secretAccessKey: <your-secret-access-key>
+ bucket-name: <your-bucket-name>
\ No newline at end of file