Skip to content

Commit

Permalink
fix checkstyle failures
Browse files Browse the repository at this point in the history
  • Loading branch information
AssahBismarkabah committed Oct 2, 2024
1 parent 6ecc023 commit a6ca0a6
Show file tree
Hide file tree
Showing 5 changed files with 170 additions and 159 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
import de.adorsys.datasafe.types.api.types.ReadStorePassword;
import de.adorsys.datasafe.types.api.utils.ExecutorServiceUtil;
import de.adorsys.datasafe.types.api.utils.ReadKeyPasswordTestFactory;

import java.io.InputStream;
import java.io.OutputStream;
import java.security.UnrecoverableKeyException;
Expand All @@ -47,6 +48,7 @@
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import lombok.SneakyThrows;
import lombok.experimental.Delegate;
import lombok.extern.slf4j.Slf4j;
Expand All @@ -58,9 +60,11 @@
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;

import static de.adorsys.datasafe.types.api.shared.DockerUtil.getDockerUri;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;

/**
 * This test distributes users' storage access keystore, document encryption keystore, and
 * users' private files into buckets that reside on different hosts. Bootstrap knows only how to
Expand Down Expand Up @@ -128,41 +132,41 @@ static void stopAll() {
@BeforeEach
void initDatasafe() {
StorageService directoryStorage = new S3StorageService(
S3ClientFactory.getClient(
endpointsByHostNoBucket.get(CREDENTIALS),
REGION,
accessKey(CREDENTIALS),
secretKey(CREDENTIALS)
),
S3ClientFactory.getClient(
endpointsByHostNoBucket.get(CREDENTIALS),
REGION,
accessKey(CREDENTIALS),
secretKey(CREDENTIALS)
),
REGION,
CREDENTIALS,
EXECUTOR
EXECUTOR
);

OverridesRegistry registry = new BaseOverridesRegistry();
this.datasafeServices = DaggerDefaultDatasafeServices.builder()
.config(new DefaultDFSConfig(endpointsByHost.get(CREDENTIALS), new ReadStorePassword("PAZZWORT")))
.overridesRegistry(registry)
.storage(new RegexDelegatingStorage(
ImmutableMap.<Pattern, StorageService>builder()
.put(Pattern.compile(endpointsByHost.get(CREDENTIALS) + ".+"), directoryStorage)
.put(
Pattern.compile(LOCALHOST + ".+"),
new UriBasedAuthStorageService(
acc -> new S3StorageService(
S3ClientFactory.getClient(
acc.getEndpoint(),
acc.getRegion(),
acc.getAccessKey(),
acc.getSecretKey()
),
acc.getRegion(),
acc.getBucketName(),
EXECUTOR
)
)
).build())
).build();
.config(new DefaultDFSConfig(endpointsByHost.get(CREDENTIALS), new ReadStorePassword("PAZZWORT")))
.overridesRegistry(registry)
.storage(new RegexDelegatingStorage(
ImmutableMap.<Pattern, StorageService>builder()
.put(Pattern.compile(endpointsByHost.get(CREDENTIALS) + ".+"), directoryStorage)
.put(
Pattern.compile(LOCALHOST + ".+"),
new UriBasedAuthStorageService(
acc -> new S3StorageService(
S3ClientFactory.getClient(
acc.getEndpoint(),
acc.getRegion(),
acc.getAccessKey(),
acc.getSecretKey()
),
acc.getRegion(),
acc.getBucketName(),
EXECUTOR
)
)
).build())
).build();

BucketAccessServiceImplRuntimeDelegatable.overrideWith(
registry, args -> new WithCredentialProvider(args.getStorageKeyStoreOperations())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import de.adorsys.datasafe.types.api.resource.StorageIdentifier;
import de.adorsys.datasafe.types.api.shared.AwsClientRetry;
import de.adorsys.datasafe.types.api.utils.ExecutorServiceUtil;

import java.io.OutputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
Expand All @@ -35,6 +36,7 @@
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.regex.Pattern;

import lombok.SneakyThrows;
import lombok.experimental.Delegate;
import lombok.extern.slf4j.Slf4j;
Expand All @@ -44,6 +46,7 @@
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;

import static de.adorsys.datasafe.examples.business.s3.MinioContainerId.DIRECTORY_BUCKET;
import static de.adorsys.datasafe.examples.business.s3.MinioContainerId.FILES_BUCKET_ONE;
import static de.adorsys.datasafe.examples.business.s3.MinioContainerId.FILES_BUCKET_TWO;
Expand Down Expand Up @@ -121,26 +124,26 @@ void testMultiUserStorageUserSetup() {
.storage(
new RegexDelegatingStorage(
ImmutableMap.<Pattern, StorageService>builder()
// bind URI that contains `directoryBucket` to directoryStorage
.put(Pattern.compile(directoryBucketS3Uri + ".+"), directoryStorage)
.put(
Pattern.compile(getDockerUri("http://127.0.0.1") + ".+"),
// Dynamically creates S3 client with bucket name equal to host value
new UriBasedAuthStorageService(
acc -> new S3StorageService(
S3ClientFactory.getClient(
acc.getEndpoint(),
acc.getRegion(),
acc.getAccessKey(),
acc.getSecretKey()
),
acc.getRegion(),
// Bucket name is encoded in first path segment
acc.getBucketName(),
EXECUTOR
)
)
).build()
// bind URI that contains `directoryBucket` to directoryStorage
.put(Pattern.compile(directoryBucketS3Uri + ".+"), directoryStorage)
.put(
Pattern.compile(getDockerUri("http://127.0.0.1") + ".+"),
// Dynamically creates S3 client with bucket name equal to host value
new UriBasedAuthStorageService(
acc -> new S3StorageService(
S3ClientFactory.getClient(
acc.getEndpoint(),
acc.getRegion(),
acc.getAccessKey(),
acc.getSecretKey()
),
acc.getRegion(),
// Bucket name is encoded in first path segment
acc.getBucketName(),
EXECUTOR
)
)
).build()
)
)
.overridesRegistry(registry)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,12 +34,14 @@
import de.adorsys.datasafe.types.api.context.overrides.OverridesRegistry;
import de.adorsys.datasafe.types.api.types.ReadStorePassword;
import de.adorsys.datasafe.types.api.utils.ExecutorServiceUtil;

import java.net.URI;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.regex.Pattern;

import lombok.experimental.Delegate;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
Expand Down Expand Up @@ -130,10 +132,10 @@ VersionedDatasafeServices versionedDatasafeServices(StorageService storageServic
StorageService clientCredentials(AmazonS3 s3, S3Factory factory, DatasafeProperties properties) {
ExecutorService executorService = ExecutorServiceUtil.submitterExecutesOnStarvationExecutingService();
S3StorageService basicStorage = new S3StorageService(
s3,
properties.getAmazonRegion(),
properties.getBucketName(),
executorService
s3,
properties.getAmazonRegion(),
properties.getBucketName(),
executorService
);

return new RegexDelegatingStorage(
Expand Down Expand Up @@ -184,10 +186,10 @@ StorageService singleStorageServiceFilesystem(DatasafeProperties properties) {
@ConditionalOnProperty(name = DATASAFE_S3_STORAGE, havingValue = "true")
StorageService singleStorageServiceS3(AmazonS3 s3, DatasafeProperties properties) {
return new S3StorageService(
s3,
properties.getAmazonRegion(),
properties.getBucketName(),
ExecutorServiceUtil.submitterExecutesOnStarvationExecutingService()
s3,
properties.getAmazonRegion(),
properties.getBucketName(),
ExecutorServiceUtil.submitterExecutesOnStarvationExecutingService()
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,12 +45,14 @@
import de.adorsys.datasafe.types.api.types.ReadKeyPassword;
import de.adorsys.datasafe.types.api.types.ReadStorePassword;
import de.adorsys.datasafe.types.api.utils.ExecutorServiceUtil;

import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.nio.file.FileSystems;
import java.util.List;
import java.util.stream.Collectors;

import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.SneakyThrows;
Expand Down Expand Up @@ -297,14 +299,14 @@ private static SystemRootAndStorageService useAmazonS3(AmazonS3DFSCredentials df
amazons3.createBucket(amazonS3DFSCredentials.getContainer());
}
StorageService storageService = new S3StorageService(
amazons3,
amazonS3DFSCredentials.getRegion(),
amazonS3DFSCredentials.getContainer(),
ExecutorServiceUtil
.submitterExecutesOnStarvationExecutingService(
amazonS3DFSCredentials.getThreadPoolSize(),
amazonS3DFSCredentials.getQueueSize()
)
amazons3,
amazonS3DFSCredentials.getRegion(),
amazonS3DFSCredentials.getContainer(),
ExecutorServiceUtil
.submitterExecutesOnStarvationExecutingService(
amazonS3DFSCredentials.getThreadPoolSize(),
amazonS3DFSCredentials.getQueueSize()
)
);
URI systemRoot = URI.create(S3_PREFIX + amazonS3DFSCredentials.getRootBucket());
log.info("build DFS to S3 with root " + amazonS3DFSCredentials.getRootBucket() + " and url " + amazonS3DFSCredentials.getUrl());
Expand Down
Loading

0 comments on commit a6ca0a6

Please sign in to comment.