diff --git a/.github/workflows/gradle-ci.yml b/.github/workflows/gradle-ci.yml index 25bbfb6..2ab80df 100644 --- a/.github/workflows/gradle-ci.yml +++ b/.github/workflows/gradle-ci.yml @@ -38,6 +38,9 @@ jobs: steps: # Set up build environment + - name: Prepare git + if: ${{ matrix.os != 'ubuntu-latest' }} + run: git config --global core.autocrlf false - name: Checkout uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: diff --git a/build.gradle.kts b/build.gradle.kts index 2c05083..31f8c64 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -146,6 +146,7 @@ subprojects { excludes = mutableListOf( "com.github.nagyesta.filebarj.core.backup.FileParseException", "com.github.nagyesta.filebarj.core.backup.worker.WindowsFileMetadataParser", + "com.github.nagyesta.filebarj.core.progress.NoOpProgressTracker", "com.github.nagyesta.filebarj.core.restore.worker.WindowsFileMetadataSetter", "com.github.nagyesta.filebarj.job.Main", "com.github.nagyesta.filebarj.job.Controller" @@ -220,10 +221,8 @@ subprojects { setOutputFormat("json") //noinspection UnnecessaryQualifiedReference val attachmentText = org.cyclonedx.model.AttachmentText() - attachmentText.setText( - Base64.getEncoder().encodeToString( - file("${project.rootProject.projectDir}/LICENSE").readBytes() - ) + attachmentText.text = Base64.getEncoder().encodeToString( + file("${project.rootProject.projectDir}/LICENSE").readBytes() ) attachmentText.encoding = "base64" attachmentText.contentType = "text/plain" diff --git a/file-barj-core/README.md b/file-barj-core/README.md index 4b319b7..4d56547 100644 --- a/file-barj-core/README.md +++ b/file-barj-core/README.md @@ -57,7 +57,11 @@ final var configuration = BackupJobConfiguration.builder() .chunkSizeMebibyte(1) .encryptionKey(null) .build(); -final var backupController = new BackupController(configuration, false); +final var backupParameters = BackupParameters.builder() + .job(configuration) + .forceFull(false) + .build(); +final var 
backupController = new BackupController(backupParameters); //executing the backup backupController.execute(1); @@ -66,13 +70,14 @@ backupController.execute(1); ### Merging increments ```java -final var mergeController = new MergeController( - Path.of("/tmp/backup"), - "prefix", - null, //optional key encryption key - 123L, //Backup start epoch seconds for the first file of the range (inclusive) - 234L //Backup start epoch seconds for the last file of the range (inclusive) -); +final var mergeParameters = MergeParameters.builder() + .backupDirectory(Path.of("/tmp/backup")) + .fileNamePrefix("prefix") + .kek(null) //optional key encryption key + .rangeStartEpochSeconds(123L) //Backup start epoch seconds for the first file of the range (inclusive) + .rangeEndEpochSeconds(234L) //Backup start epoch seconds for the last file of the range (inclusive) + .build(); +final var mergeController = new MergeController(mergeParameters); mergeController.execute(false); ``` @@ -91,8 +96,13 @@ final var restoreTask = RestoreTask.builder() .includedPath(BackupPath.of("/source/dir")) //optional path filter .permissionComparisonStrategy(PermissionComparisonStrategy.STRICT) //optional .build(); -final var pointInTime = 123456L; -final var restoreController = new RestoreController(Path.of("/tmp/backup"), "test", null, pointInTime); +final var restoreParameters = RestoreParameters.builder() + .backupDirectory(Path.of("/tmp/backup")) + .fileNamePrefix("test") + .kek(null) + .atPointInTime(123456L) + .build(); +final var restoreController = new RestoreController(restoreParameters); //executing the restore restoreController.execute(restoreTask); @@ -104,7 +114,12 @@ restoreController.execute(restoreTask); //configuring the inspection job final var backupDir = Path.of("/backup/directory"); final var outputFile = Path.of("/backup/directory"); -final var controller = new IncrementInspectionController(backupDir, "file-prefix", null); +final var inspectParameters = InspectParameters.builder() + 
.backupDirectory(backupDir) + .fileNamePrefix("file-prefix") + .kek(null) + .build(); +final var controller = new IncrementInspectionController(inspectParameters); //list the summary of the available increments controller.inspectIncrements(System.out); @@ -119,7 +134,12 @@ controller.inspectContent(Long.MAX_VALUE, outputFile); //configuring the deletion job final var backupDir = Path.of("/backup/directory"); final var outputFile = Path.of("/backup/directory"); -final var controller = new IncrementDeletionController(backupDir, "file-prefix", null); +final var deletionParameters = IncrementDeletionParameters.builder() + .backupDirectory(backupDir) + .fileNamePrefix("file-prefix") + .kek(null) + .build(); +final var controller = new IncrementDeletionController(deletionParameters); //Delete all backup increments: // - starting with the one created at 123456 diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupController.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupController.java index 2460b9e..e0d2ee6 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupController.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupController.java @@ -5,12 +5,14 @@ import com.github.nagyesta.filebarj.core.backup.worker.FileMetadataParser; import com.github.nagyesta.filebarj.core.backup.worker.FileMetadataParserFactory; import com.github.nagyesta.filebarj.core.common.*; -import com.github.nagyesta.filebarj.core.config.BackupJobConfiguration; import com.github.nagyesta.filebarj.core.model.BackupIncrementManifest; import com.github.nagyesta.filebarj.core.model.BackupPath; import com.github.nagyesta.filebarj.core.model.FileMetadata; import com.github.nagyesta.filebarj.core.model.enums.BackupType; import com.github.nagyesta.filebarj.core.model.enums.FileType; +import 
com.github.nagyesta.filebarj.core.progress.ObservableProgressTracker; +import com.github.nagyesta.filebarj.core.progress.ProgressStep; +import com.github.nagyesta.filebarj.core.progress.ProgressTracker; import com.github.nagyesta.filebarj.core.util.LogUtil; import com.github.nagyesta.filebarj.io.stream.BarjCargoArchiverFileOutputStream; import lombok.Getter; @@ -22,10 +24,10 @@ import java.nio.file.Path; import java.util.*; import java.util.concurrent.ForkJoinPool; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; +import static com.github.nagyesta.filebarj.core.progress.ProgressStep.*; import static com.github.nagyesta.filebarj.core.util.TimerUtil.toProcessSummary; import static com.github.nagyesta.filebarj.io.stream.internal.ChunkingFileOutputStream.MEBIBYTE; @@ -36,8 +38,9 @@ @Slf4j public class BackupController { private static final int BATCH_SIZE = 250000; + private static final List PROGRESS_STEPS = List.of(LOAD_MANIFESTS, SCAN_FILES, PARSE_METADATA, BACKUP); private final FileMetadataParser metadataParser = FileMetadataParserFactory.newInstance(); - private final ManifestManager manifestManager = new ManifestManagerImpl(); + private final ManifestManager manifestManager; @Getter private final BackupIncrementManifest manifest; private final SortedMap previousManifests; @@ -47,22 +50,31 @@ public class BackupController { private final ReentrantLock executionLock = new ReentrantLock(); private FileMetadataChangeDetector changeDetector; private ForkJoinPool threadPool; + private final ProgressTracker progressTracker; /** * Creates a new instance and initializes it for the specified job. 
* - * @param job the job configuration - * @param forceFull whether to force a full backup (overriding the configuration) + * @param parameters The parameters */ - public BackupController(@NonNull final BackupJobConfiguration job, final boolean forceFull) { + public BackupController(final @NonNull BackupParameters parameters) { + this.progressTracker = new ObservableProgressTracker(PROGRESS_STEPS); + progressTracker.registerListener(parameters.getProgressListener()); + this.manifestManager = new ManifestManagerImpl(progressTracker); + + final var job = parameters.getJob(); var backupType = job.getBackupType(); this.previousManifests = new TreeMap<>(); + final var forceFull = parameters.isForceFull(); if (!forceFull && backupType != BackupType.FULL) { this.previousManifests.putAll(manifestManager.loadPreviousManifestsForBackup(job)); if (previousManifests.isEmpty()) { backupType = BackupType.FULL; } } + if (forceFull) { + backupType = BackupType.FULL; + } this.manifest = manifestManager.generateManifest(job, backupType, previousManifests.size()); } @@ -107,19 +119,20 @@ private void listAffectedFilesFromBackupSources() { if (uniquePaths.isEmpty()) { throw new IllegalStateException("No files found in backup sources!"); } + progressTracker.completeStep(SCAN_FILES); detectCaseInsensitivityIssues(uniquePaths); log.info("Found {} unique paths in backup sources. 
Parsing metadata...", uniquePaths.size()); - final var doneCount = new AtomicInteger(0); + progressTracker.estimateStepSubtotal(PARSE_METADATA, uniquePaths.size()); this.filesFound = threadPool.submit(() -> uniquePaths.parallelStream() .map(path -> { final var fileMetadata = metadataParser.parse(path.toFile(), manifest.getConfiguration()); - LogUtil.logIfThresholdReached(doneCount.incrementAndGet(), uniquePaths.size(), - (done, total) -> log.info("Parsed {} of {} unique paths.", done, total)); + progressTracker.recordProgressInSubSteps(PARSE_METADATA); return fileMetadata; }) .collect(Collectors.toList())).join(); LogUtil.logStatistics(filesFound, (type, count) -> log.info("Found {} {} items in backup sources.", count, type)); + progressTracker.completeStep(PARSE_METADATA); } private void detectCaseInsensitivityIssues(final SortedSet uniquePaths) { @@ -182,6 +195,7 @@ private void executeBackup(final int threads) { final var totalBackupSize = backupFileSet.values().stream() .mapToLong(FileMetadata::getOriginalSizeBytes) .sum(); + progressTracker.estimateStepSubtotal(BACKUP, totalBackupSize); final var totalSize = totalBackupSize / MEBIBYTE; log.info("Backing up delta for {} files ({} MiB)", backupFileSet.size(), totalSize); try (var pipeline = getPipeline(threads)) { @@ -215,13 +229,14 @@ private void executeBackup(final int threads) { manifest.setIndexFileName(pipeline.getIndexFileWritten().getFileName().toString()); final var endTimeMillis = System.currentTimeMillis(); final var durationMillis = endTimeMillis - startTimeMillis; + progressTracker.completeStep(BACKUP); log.info("Archive write completed. 
Archive write took: {}", toProcessSummary(durationMillis, totalBackupSize)); } catch (final Exception e) { throw new ArchivalException("Archival process failed.", e); } } - private void findPreviousVersionToReuseOrAddToBackupFileSet(@NotNull final FileMetadata file) { + private void findPreviousVersionToReuseOrAddToBackupFileSet(final @NotNull FileMetadata file) { if (file.getFileType() == FileType.DIRECTORY) { updateDirectoryChangeStatus(file); manifest.getFiles().put(file.getId(), file); @@ -239,7 +254,7 @@ private void findPreviousVersionToReuseOrAddToBackupFileSet(@NotNull final FileM backupFileSet.put(file.getAbsolutePath(), file); } - private void updateDirectoryChangeStatus(@NotNull final FileMetadata file) { + private void updateDirectoryChangeStatus(final @NotNull FileMetadata file) { final var previousVersion = changeDetector.findPreviousVersionByAbsolutePath(file.getAbsolutePath()); if (previousVersion != null) { final var change = changeDetector.classifyChange(previousVersion, file); @@ -247,17 +262,20 @@ private void updateDirectoryChangeStatus(@NotNull final FileMetadata file) { } } - @NotNull - private BaseBackupPipeline getPipeline( + private @NotNull BaseBackupPipeline getPipeline( final int threads) throws IOException { + final BaseBackupPipeline pipeline; if (threads == 1) { - return new BackupPipeline(manifest); + pipeline = new BackupPipeline(manifest); } else { - return new ParallelBackupPipeline(manifest, threads); + pipeline = new ParallelBackupPipeline(manifest, threads); } + pipeline.setProgressTracker(progressTracker); + return pipeline; } private void saveManifest() { manifestManager.persist(manifest); } + } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupParameters.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupParameters.java new file mode 100644 index 0000000..9baa3bc --- /dev/null +++ 
b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupParameters.java @@ -0,0 +1,18 @@ +package com.github.nagyesta.filebarj.core.backup.pipeline; + +import com.github.nagyesta.filebarj.core.config.BackupJobConfiguration; +import com.github.nagyesta.filebarj.core.progress.LoggingProgressListener; +import com.github.nagyesta.filebarj.core.progress.ProgressListener; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; + +@Data +@Builder +public class BackupParameters { + private final @NonNull BackupJobConfiguration job; + @Builder.Default + private final boolean forceFull = false; + @Builder.Default + private final @NonNull ProgressListener progressListener = LoggingProgressListener.INSTANCE; +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupPipeline.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupPipeline.java index 71cb628..9ad6435 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupPipeline.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupPipeline.java @@ -21,13 +21,12 @@ public class BackupPipeline extends BaseBackupPipeline imp private final BackupIncrementManifest manifest; private final T outputStream; + @Setter + private @NonNull ProgressTracker progressTracker = new NoOpProgressTracker(); /** * Creates a new instance for the manifest that must be used for the backup. 
@@ -36,8 +42,8 @@ public class BaseBackupPipeline imp * @param outputStream The stream to write to */ protected BaseBackupPipeline( - @NotNull final BackupIncrementManifest manifest, - @NotNull final T outputStream) { + final @NotNull BackupIncrementManifest manifest, + final @NotNull T outputStream) { this.manifest = manifest; this.outputStream = outputStream; manifest.getVersions().forEach(version -> { @@ -77,7 +83,7 @@ public Path getIndexFileWritten() { * @throws ArchivalException When the file cannot be archived due to an I/O error from the stream */ public List storeEntries( - @NonNull final List> groupedFileMetadataList) throws ArchivalException { + final @NonNull List> groupedFileMetadataList) throws ArchivalException { return groupedFileMetadataList.stream().map(fileMetadataList -> { if (fileMetadataList == null || fileMetadataList.isEmpty()) { throw new IllegalArgumentException("File metadata list cannot be null or empty"); @@ -92,6 +98,7 @@ public List storeEntries( warnIfHashDoesNotMatch(duplicate, archivedFileMetadata); archivedFileMetadata.getFiles().add(duplicate.getId()); duplicate.setArchiveMetadataId(archivedFileMetadata.getId()); + reportProgress(duplicate); }); return archivedFileMetadata; } catch (final Exception e) { @@ -163,6 +170,7 @@ private void archiveContentAndUpdateMetadata( warnIfHashDoesNotMatch(fileMetadata, archivedFileMetadata); //commit fileMetadata.setArchiveMetadataId(archivedFileMetadata.getId()); + reportProgress(fileMetadata); } /** @@ -173,12 +181,18 @@ private void archiveContentAndUpdateMetadata( */ protected void warnIfHashDoesNotMatch(final FileMetadata fileMetadata, final ArchivedFileMetadata archivedFileMetadata) { if (!Objects.equals(archivedFileMetadata.getOriginalHash(), fileMetadata.getOriginalHash())) { - log.warn("The hash changed between delta calculation and archival for: " + fileMetadata.getAbsolutePath() - + " The archive might contain corrupt data for the file."); + log.warn("The hash changed between delta 
calculation and archival for: {} The archive might contain corrupt data for the file.", + fileMetadata.getAbsolutePath()); fileMetadata.setError("The hash changed between delta calculation and archival."); } } + protected void reportProgress(final FileMetadata fileMetadata) { + if (fileMetadata.getOriginalSizeBytes() > 0) { + progressTracker.recordProgressInSubSteps(ProgressStep.BACKUP, fileMetadata.getOriginalSizeBytes()); + } + } + private ArchiveEntryLocator createArchiveEntryLocator(final UUID archiveId) { return ArchiveEntryLocator.builder() .entryName(archiveId) diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/ParallelBackupPipeline.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/ParallelBackupPipeline.java index 3ea7027..a2020ac 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/ParallelBackupPipeline.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/pipeline/ParallelBackupPipeline.java @@ -33,14 +33,13 @@ public class ParallelBackupPipeline extends BaseBackupPipeline storeEntries( - @NonNull final List> groupedFileMetadataList) throws ArchivalException { + final @NonNull List> groupedFileMetadataList) throws ArchivalException { final var fileCount = groupedFileMetadataList.stream().filter(Objects::nonNull).mapToInt(List::size).sum(); final var entryCount = groupedFileMetadataList.size(); log.info("Storing the file content of {} entries ({} files) in parallel", entryCount, fileCount); @@ -79,6 +78,7 @@ public List storeEntries( warnIfHashDoesNotMatch(duplicate, archived); duplicate.setArchiveMetadataId(archived.getId()); archived.getFiles().add(duplicate.getId()); + reportProgress(duplicate); }); return archived; }); @@ -118,6 +118,7 @@ private CompletableFuture archiveContentAndUpdateMetadata( warnIfHashDoesNotMatch(fileMetadata, archivedFileMetadata); //commit 
fileMetadata.setArchiveMetadataId(archivedFileMetadata.getId()); + reportProgress(fileMetadata); return archivedFileMetadata; }); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/DefaultBackupScopePartitioner.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/DefaultBackupScopePartitioner.java index 92a8f49..fff6293 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/DefaultBackupScopePartitioner.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/DefaultBackupScopePartitioner.java @@ -29,21 +29,19 @@ public class DefaultBackupScopePartitioner implements BackupScopePartitioner { */ public DefaultBackupScopePartitioner( final int batchSize, - @NonNull final DuplicateHandlingStrategy duplicateHandlingStrategy, - @NonNull final HashAlgorithm hashAlgorithm) { + final @NonNull DuplicateHandlingStrategy duplicateHandlingStrategy, + final @NonNull HashAlgorithm hashAlgorithm) { this.batchSize = batchSize; this.groupingFunction = duplicateHandlingStrategy.fileGroupingFunctionForHash(hashAlgorithm); } @Override - @NotNull - public List>> partitionBackupScope(@NonNull final Collection scope) { + public @NotNull List>> partitionBackupScope(final @NonNull Collection scope) { final var groupedScope = filterAndGroup(scope); return partition(groupedScope); } - @NotNull - private Collection> filterAndGroup(@NotNull final Collection scope) { + private @NotNull Collection> filterAndGroup(final @NotNull Collection scope) { return scope.stream() .filter(metadata -> metadata.getStatus().isStoreContent()) .filter(metadata -> metadata.getFileType().isContentSource()) @@ -51,8 +49,7 @@ private Collection> filterAndGroup(@NotNull final Collection< .values(); } - @NotNull - private List>> partition(@NotNull final Collection> groupedScope) { + private @NotNull List>> partition(final @NotNull Collection> groupedScope) { final List>> 
partitionedScope = new ArrayList<>(); var batch = new ArrayList>(); var size = 0; diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/FileMetadataParserFactory.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/FileMetadataParserFactory.java index deeae93..88227af 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/FileMetadataParserFactory.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/FileMetadataParserFactory.java @@ -15,8 +15,7 @@ public class FileMetadataParserFactory { * * @return the parser */ - @NotNull - public static FileMetadataParser newInstance() { + public static @NotNull FileMetadataParser newInstance() { final var isWindows = OsUtil.isWindows(); return newInstance(isWindows); } @@ -27,8 +26,7 @@ public static FileMetadataParser newInstance() { * @param isWindows should be true if the current OS is Windows * @return the parser */ - @NotNull - static PosixFileMetadataParser newInstance(final boolean isWindows) { + static @NotNull PosixFileMetadataParser newInstance(final boolean isWindows) { if (isWindows) { return new WindowsFileMetadataParser(); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/PosixFileMetadataParser.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/PosixFileMetadataParser.java index 1f8ddc9..0755eac 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/PosixFileMetadataParser.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/backup/worker/PosixFileMetadataParser.java @@ -34,10 +34,9 @@ public class PosixFileMetadataParser implements FileMetadataParser { */ public static final String DEFAULT_OWNER = "-"; - @NotNull @Override - public FileMetadata parse( - @NonNull final File file, @NonNull final BackupJobConfiguration configuration) { + public 
@NotNull FileMetadata parse( + final @NonNull File file, final @NonNull BackupJobConfiguration configuration) { if (!Files.exists(file.toPath(), LinkOption.NOFOLLOW_LINKS)) { return FileMetadata.builder() .id(UUID.randomUUID()) diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/BaseFileMetadataChangeDetector.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/BaseFileMetadataChangeDetector.java index f952127..3a36c5e 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/BaseFileMetadataChangeDetector.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/BaseFileMetadataChangeDetector.java @@ -29,8 +29,8 @@ public abstract class BaseFileMetadataChangeDetector implements FileMetadataC * @param permissionStrategy The permission comparison strategy */ protected BaseFileMetadataChangeDetector( - @NotNull final Map> filesFromManifests, - @Nullable final PermissionComparisonStrategy permissionStrategy) { + final @NotNull Map> filesFromManifests, + final @Nullable PermissionComparisonStrategy permissionStrategy) { this.filesFromManifests = new TreeMap<>(filesFromManifests); final SortedMap>> contentIndex = new TreeMap<>(); final Map nameIndex = new TreeMap<>(); @@ -42,8 +42,8 @@ protected BaseFileMetadataChangeDetector( @Override public boolean hasMetadataChanged( - @NonNull final FileMetadata previousMetadata, - @NonNull final FileMetadata currentMetadata) { + final @NonNull FileMetadata previousMetadata, + final @NonNull FileMetadata currentMetadata) { final var permissionsChanged = !permissionComparisonStrategy.matches(previousMetadata, currentMetadata); final var hiddenStatusChanged = currentMetadata.getHidden() != previousMetadata.getHidden(); final var timesChanged = currentMetadata.getFileType() != FileType.SYMBOLIC_LINK @@ -53,14 +53,13 @@ public boolean hasMetadataChanged( @Override public boolean isFromLastIncrement( - @NonNull final FileMetadata 
fileMetadata) { + final @NonNull FileMetadata fileMetadata) { return filesFromManifests.get(filesFromManifests.lastKey()).containsKey(fileMetadata.getId()); } - @Nullable @Override - public FileMetadata findMostRelevantPreviousVersion( - @NonNull final FileMetadata currentMetadata) { + public @Nullable FileMetadata findMostRelevantPreviousVersion( + final @NonNull FileMetadata currentMetadata) { final var increments = filesFromManifests.keySet().stream().sorted(Comparator.reverseOrder()).toList(); final var previousSamePath = nameIndex.getOrDefault(currentMetadata.getAbsolutePath().toString(), null); if (previousSamePath != null && !hasContentChanged(previousSamePath, currentMetadata)) { @@ -82,18 +81,16 @@ public FileMetadata findMostRelevantPreviousVersion( return previousSamePath; } - @Nullable @Override - public FileMetadata findPreviousVersionByAbsolutePath( - @NonNull final BackupPath absolutePath) { + public @Nullable FileMetadata findPreviousVersionByAbsolutePath( + final @NonNull BackupPath absolutePath) { return nameIndex.get(absolutePath.toString()); } - @NotNull @Override - public Change classifyChange( - @NonNull final FileMetadata previousMetadata, - @NonNull final FileMetadata currentMetadata) { + public @NotNull Change classifyChange( + final @NonNull FileMetadata previousMetadata, + final @NonNull FileMetadata currentMetadata) { if (currentMetadata.getFileType() == FileType.MISSING) { return Change.DELETED; } else if (previousMetadata.getFileType() == FileType.MISSING) { @@ -118,9 +115,9 @@ public Change classifyChange( protected abstract T getPrimaryContentCriteria(@NotNull FileMetadata metadata); private void index( - @NotNull final SortedMap> filesFromManifests, - @NotNull final SortedMap>> contentIndexMap, - @NotNull final Map nameIndexMap) { + final @NotNull SortedMap> filesFromManifests, + final @NotNull SortedMap>> contentIndexMap, + final @NotNull Map nameIndexMap) { filesFromManifests.forEach((increment, files) -> { files.forEach((uuid, 
metadata) -> contentIndexMap.computeIfAbsent(increment, k -> new HashMap<>()) .computeIfAbsent(getPrimaryContentCriteria(metadata), k -> new ArrayList<>()) diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/FileMetadataChangeDetectorFactory.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/FileMetadataChangeDetectorFactory.java index 7c5f91f..312a644 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/FileMetadataChangeDetectorFactory.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/FileMetadataChangeDetectorFactory.java @@ -23,9 +23,9 @@ public class FileMetadataChangeDetectorFactory { * @return The new instance */ public static FileMetadataChangeDetector create( - @NonNull final BackupJobConfiguration configuration, - @NonNull final Map> filesFromManifests, - @Nullable final PermissionComparisonStrategy permissionStrategy) { + final @NonNull BackupJobConfiguration configuration, + final @NonNull Map> filesFromManifests, + final @Nullable PermissionComparisonStrategy permissionStrategy) { if (filesFromManifests.isEmpty()) { throw new IllegalArgumentException("Previous manifests cannot be empty"); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/HashingFileMetadataChangeDetector.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/HashingFileMetadataChangeDetector.java index a77e960..cb2b134 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/HashingFileMetadataChangeDetector.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/HashingFileMetadataChangeDetector.java @@ -21,15 +21,15 @@ public class HashingFileMetadataChangeDetector extends BaseFileMetadataChangeDet * @param permissionStrategy The permission comparison strategy */ protected HashingFileMetadataChangeDetector( - @NotNull final Map> filesFromManifests, - @Nullable 
final PermissionComparisonStrategy permissionStrategy) { + final @NotNull Map> filesFromManifests, + final @Nullable PermissionComparisonStrategy permissionStrategy) { super(filesFromManifests, permissionStrategy); } @Override public boolean hasContentChanged( - @NonNull final FileMetadata previousMetadata, - @NonNull final FileMetadata currentMetadata) { + final @NonNull FileMetadata previousMetadata, + final @NonNull FileMetadata currentMetadata) { final var isContentSource = previousMetadata.getFileType().isContentSource() || currentMetadata.getFileType().isContentSource(); final var hasContentChanged = !Objects.equals(previousMetadata.getFileType(), currentMetadata.getFileType()) || !Objects.equals(previousMetadata.getOriginalHash(), currentMetadata.getOriginalHash()) @@ -38,7 +38,7 @@ public boolean hasContentChanged( } @Override - protected String getPrimaryContentCriteria(@NotNull final FileMetadata metadata) { + protected String getPrimaryContentCriteria(final @NotNull FileMetadata metadata) { return metadata.getOriginalHash(); } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestCipherInputStream.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestCipherInputStream.java index 11b74d1..f64c461 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestCipherInputStream.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestCipherInputStream.java @@ -31,7 +31,7 @@ public class ManifestCipherInputStream extends DoOnCloseInputStream { * @throws IOException if an I/O error occurs */ public ManifestCipherInputStream( - @NonNull final InputStream source, @Nullable final PrivateKey key) throws IOException { + final @NonNull InputStream source, final @Nullable PrivateKey key) throws IOException { this.source = source; if (key != null) { final var encryptedKeyLength = ByteBuffer.wrap(source.readNBytes(BYTES_IN_INT)).getInt(); 
diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestCipherOutputStream.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestCipherOutputStream.java index 866c25e..dd41762 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestCipherOutputStream.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestCipherOutputStream.java @@ -32,7 +32,7 @@ public class ManifestCipherOutputStream extends DoOnCloseOutputStream { * @throws IOException if an I/O error occurs */ public ManifestCipherOutputStream( - @NonNull final OutputStream destination, @Nullable final PublicKey key) throws IOException { + final @NonNull OutputStream destination, final @Nullable PublicKey key) throws IOException { this.destination = destination; if (key != null) { final var secretKey = EncryptionUtil.generateAesKey(); diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImpl.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImpl.java index 0ecdfdf..1568b0b 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImpl.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImpl.java @@ -7,6 +7,7 @@ import com.github.nagyesta.filebarj.core.model.enums.BackupType; import com.github.nagyesta.filebarj.core.model.enums.Change; import com.github.nagyesta.filebarj.core.model.enums.OperatingSystem; +import com.github.nagyesta.filebarj.core.progress.ProgressTracker; import com.github.nagyesta.filebarj.core.util.LogUtil; import com.github.nagyesta.filebarj.core.util.OsUtil; import jakarta.validation.Validation; @@ -32,6 +33,8 @@ import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; +import static com.github.nagyesta.filebarj.core.progress.ProgressStep.LOAD_MANIFESTS; + /** * Default 
implementation of {@link ManifestManager}. */ @@ -41,11 +44,17 @@ public class ManifestManagerImpl implements ManifestManager { private static final String MANIFEST_JSON_GZ = ".manifest.json.gz"; private final ObjectMapper mapper = new ObjectMapper(); private final Validator validator = createValidator(); + private final ProgressTracker progressTracker; + + public ManifestManagerImpl(final @NonNull ProgressTracker progressTracker) { + progressTracker.assertSupports(LOAD_MANIFESTS); + this.progressTracker = progressTracker; + } @Override public BackupIncrementManifest generateManifest( - @NonNull final BackupJobConfiguration jobConfiguration, - @NonNull final BackupType backupTypeOverride, + final @NonNull BackupJobConfiguration jobConfiguration, + final @NonNull BackupType backupTypeOverride, final int nextVersion) { final var startTimeEpochSecond = Instant.now().getEpochSecond(); final var fileNamePrefix = jobConfiguration.getFileNamePrefix() + "-" + startTimeEpochSecond; @@ -68,22 +77,22 @@ public BackupIncrementManifest generateManifest( @Override public void persist( - @NonNull final BackupIncrementManifest manifest) { + final @NonNull BackupIncrementManifest manifest) { final var backupDestination = manifest.getConfiguration().getDestinationDirectory(); persist(manifest, backupDestination); } @Override public void persist( - @NonNull final BackupIncrementManifest manifest, - @NonNull final Path backupDestination) { + final @NonNull BackupIncrementManifest manifest, + final @NonNull Path backupDestination) { validate(manifest, ValidationRules.Persisted.class); doPersist(manifest, backupDestination.toFile()); } private void doPersist( - @NotNull final BackupIncrementManifest manifest, - @NotNull final File backupDestination) { + final @NotNull BackupIncrementManifest manifest, + final @NotNull File backupDestination) { final var backupHistoryDir = new File(backupDestination, HISTORY_FOLDER); //noinspection ResultOfMethodCallIgnored backupHistoryDir.mkdirs(); @@ 
-112,9 +121,9 @@ private void doPersist( @Override public SortedMap load( - @NonNull final Path destinationDirectory, - @NonNull final String fileNamePrefix, - @Nullable final PrivateKey privateKey, + final @NonNull Path destinationDirectory, + final @NonNull String fileNamePrefix, + final @Nullable PrivateKey privateKey, final long latestBeforeEpochMillis) { try (var pathStream = Files.list(destinationDirectory)) { final var manifestFiles = pathStream @@ -132,9 +141,9 @@ public SortedMap load( @Override public SortedMap loadAll( - @NonNull final Path destinationDirectory, - @NonNull final String fileNamePrefix, - @Nullable final PrivateKey privateKey) { + final @NonNull Path destinationDirectory, + final @NonNull String fileNamePrefix, + final @Nullable PrivateKey privateKey) { try (var pathStream = Files.list(destinationDirectory)) { final var manifestFiles = pathStream .filter(path -> path.getFileName().toString().startsWith(fileNamePrefix)) @@ -149,7 +158,7 @@ public SortedMap loadAll( @Override public SortedMap loadPreviousManifestsForBackup( - @NonNull final BackupJobConfiguration job) { + final @NonNull BackupJobConfiguration job) { final var historyFolder = job.getDestinationDirectory().resolve(HISTORY_FOLDER); if (!Files.exists(historyFolder)) { return Collections.emptySortedMap(); @@ -174,7 +183,7 @@ public SortedMap loadPreviousManifestsForBacku @Override public RestoreManifest mergeForRestore( - @NonNull final SortedMap manifests) { + final @NonNull SortedMap manifests) { final var maximumAppVersion = findMaximumAppVersion(manifests); final var lastIncrementManifest = manifests.get(manifests.lastKey()); final var maximumTimeStamp = lastIncrementManifest.getStartTimeUtcEpochSeconds(); @@ -205,8 +214,8 @@ public RestoreManifest mergeForRestore( @Override public void validate( - @NonNull final BackupIncrementManifest manifest, - @NonNull final Class forAction) { + final @NonNull BackupIncrementManifest manifest, + final @NonNull Class forAction) { final 
var violations = validator.validate(manifest, forAction, Default.class); if (!violations.isEmpty()) { @@ -220,8 +229,8 @@ public void validate( @Override public void deleteIncrement( - @NonNull final Path backupDirectory, - @NonNull final BackupIncrementManifest manifest) { + final @NonNull Path backupDirectory, + final @NonNull BackupIncrementManifest manifest) { final var fileNamePrefix = manifest.getFileNamePrefix(); deleteManifestFromHistoryIfExists(backupDirectory, fileNamePrefix); deleteManifestAndArchiveFilesFromBackupDirectory(backupDirectory, fileNamePrefix); @@ -233,12 +242,12 @@ private static Validator createValidator() { } } - @NotNull - private SortedMap loadManifests( - @NotNull final List manifestFiles, - @Nullable final PrivateKey privateKey, + private @NotNull SortedMap loadManifests( + final @NotNull List manifestFiles, + final @Nullable PrivateKey privateKey, final long latestBeforeEpochMillis) { final SortedMap manifests = new TreeMap<>(); + progressTracker.estimateStepSubtotal(LOAD_MANIFESTS, manifestFiles.size()); for (final var path : manifestFiles) { try (var fileStream = new FileInputStream(path.toFile()); var bufferedStream = new BufferedInputStream(fileStream); @@ -258,15 +267,17 @@ private SortedMap loadManifests( } catch (final Exception e) { log.warn("Failed to load manifest file: {}", path, e); } + progressTracker.recordProgressInSubSteps(LOAD_MANIFESTS); } + progressTracker.completeStep(LOAD_MANIFESTS); return manifests; } - @NotNull - private SortedMap loadAllManifests( - @NotNull final List manifestFiles, - @Nullable final PrivateKey privateKey) { + private @NotNull SortedMap loadAllManifests( + final @NotNull List manifestFiles, + final @Nullable PrivateKey privateKey) { final SortedMap manifests = new TreeMap<>(); + progressTracker.estimateStepSubtotal(LOAD_MANIFESTS, manifestFiles.size()); for (final var path : manifestFiles) { try (var fileStream = new FileInputStream(path.toFile()); var bufferedStream = new 
BufferedInputStream(fileStream); @@ -277,6 +288,7 @@ private SortedMap loadAllManifests( .readValue(reader, BackupIncrementManifest.class); validate(manifest, ValidationRules.Persisted.class); manifests.put(manifest.getStartTimeUtcEpochSeconds(), manifest); + progressTracker.recordProgressInSubSteps(LOAD_MANIFESTS); } catch (final Exception e) { log.warn("Failed to load manifest file: {}", path, e); } @@ -284,6 +296,7 @@ private SortedMap loadAllManifests( if (manifests.isEmpty()) { throw new ArchivalException("No manifests found."); } + progressTracker.completeStep(LOAD_MANIFESTS); return manifests; } @@ -326,9 +339,8 @@ private void populateFilesAndArchiveEntries( }); } - @NotNull - private Set calculateRemainingFilesAndLinks( - @NotNull final BackupIncrementManifest lastIncrementManifest) { + private @NotNull Set calculateRemainingFilesAndLinks( + final @NotNull BackupIncrementManifest lastIncrementManifest) { return lastIncrementManifest.getFiles().values().stream() .filter(fileMetadata -> fileMetadata.getStatus() != Change.DELETED) .filter(fileMetadata -> fileMetadata.getFileType().isContentSource()) @@ -337,8 +349,8 @@ private Set calculateRemainingFilesAndLinks( } private void addDirectoriesToFiles( - @NotNull final BackupIncrementManifest lastIncrementManifest, - @NotNull final Map> files) { + final @NotNull BackupIncrementManifest lastIncrementManifest, + final @NotNull Map> files) { lastIncrementManifest.getFiles().values().stream() .filter(fileMetadata -> fileMetadata.getStatus() != Change.DELETED) .filter(fileMetadata -> !fileMetadata.getFileType().isContentSource()) @@ -346,26 +358,23 @@ private void addDirectoriesToFiles( .put(file.getId(), file)); } - @NotNull - private AppVersion findMaximumAppVersion( - @NotNull final SortedMap manifests) { + private @NotNull AppVersion findMaximumAppVersion( + final @NotNull SortedMap manifests) { return manifests.values().stream() .map(BackupIncrementManifest::getAppVersion) .max(AppVersion::compareTo) 
.orElse(new AppVersion()); } - @NotNull - private SortedMap> findAllFilenamePrefixes( - @NotNull final SortedMap manifests) { + private @NotNull SortedMap> findAllFilenamePrefixes( + final @NotNull SortedMap manifests) { final var result = new TreeMap>(); manifests.values().forEach(manifest -> result.put(manifest.getFileNamePrefix(), manifest.getVersions())); return result; } - @NotNull - private SortedMap> mergeEncryptionKeys( - @NotNull final SortedMap manifests) { + private @NotNull SortedMap> mergeEncryptionKeys( + final @NotNull SortedMap manifests) { final var keys = new TreeMap>(); manifests.values().stream() .map(BackupIncrementManifest::getEncryptionKeys) @@ -375,7 +384,7 @@ private SortedMap> mergeEncryptionKeys( } private void deleteManifestAndArchiveFilesFromBackupDirectory( - @NotNull final Path backupDirectory, @NotNull final String fileNamePrefix) { + final @NotNull Path backupDirectory, final @NotNull String fileNamePrefix) { final var patterns = Set.of( "^" + fileNamePrefix + "\\.[0-9]{5}\\.cargo$", "^" + fileNamePrefix + "\\.manifest\\.cargo$", @@ -402,7 +411,7 @@ private void deleteManifestAndArchiveFilesFromBackupDirectory( } private void deleteManifestFromHistoryIfExists( - @NotNull final Path backupDirectory, @NotNull final String fileNamePrefix) { + final @NotNull Path backupDirectory, final @NotNull String fileNamePrefix) { final var fromHistory = backupDirectory.resolve(HISTORY_FOLDER) .resolve(fileNamePrefix + MANIFEST_JSON_GZ); try { diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/PermissionComparisonStrategy.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/PermissionComparisonStrategy.java index 3a2a178..295fa06 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/PermissionComparisonStrategy.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/PermissionComparisonStrategy.java @@ -20,8 +20,8 @@ public enum 
PermissionComparisonStrategy { STRICT(true, true) { @Override public boolean matches( - @NonNull final FileMetadata previousMetadata, - @NonNull final FileMetadata currentMetadata) { + final @NonNull FileMetadata previousMetadata, + final @NonNull FileMetadata currentMetadata) { return Objects.equals(previousMetadata.getPosixPermissions(), currentMetadata.getPosixPermissions()) && Objects.equals(previousMetadata.getOwner(), currentMetadata.getOwner()) && Objects.equals(previousMetadata.getGroup(), currentMetadata.getGroup()); @@ -34,8 +34,8 @@ public boolean matches( PERMISSION_ONLY(true, false) { @Override public boolean matches( - @NonNull final FileMetadata previousMetadata, - @NonNull final FileMetadata currentMetadata) { + final @NonNull FileMetadata previousMetadata, + final @NonNull FileMetadata currentMetadata) { return Objects.equals(previousMetadata.getPosixPermissions(), currentMetadata.getPosixPermissions()); } }, @@ -49,13 +49,12 @@ public boolean matches( @Override public boolean matches( - @NonNull final FileMetadata previousMetadata, - @NonNull final FileMetadata currentMetadata) { + final @NonNull FileMetadata previousMetadata, + final @NonNull FileMetadata currentMetadata) { return Objects.equals(transform(previousMetadata.getPosixPermissions()), transform(currentMetadata.getPosixPermissions())); } - @Nullable - private static String transform(final String permissions) { + private static @Nullable String transform(final String permissions) { return Optional.ofNullable(permissions) .map(s -> s.substring(FIRST_SIGNIFICANT_CHAR_INCLUSIVE, LAST_SIGNIFICANT_CHAR_EXCLUSIVE)) .orElse(null); @@ -68,8 +67,8 @@ private static String transform(final String permissions) { IGNORE(false, false) { @Override public boolean matches( - @NonNull final FileMetadata previousMetadata, - @NonNull final FileMetadata currentMetadata) { + final @NonNull FileMetadata previousMetadata, + final @NonNull FileMetadata currentMetadata) { return true; } }; diff --git 
a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/SimpleFileMetadataChangeDetector.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/SimpleFileMetadataChangeDetector.java index 599e221..f208da6 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/SimpleFileMetadataChangeDetector.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/SimpleFileMetadataChangeDetector.java @@ -21,15 +21,15 @@ public class SimpleFileMetadataChangeDetector extends BaseFileMetadataChangeDete * @param permissionStrategy The permission comparison strategy */ protected SimpleFileMetadataChangeDetector( - @NotNull final Map> filesFromManifests, - @Nullable final PermissionComparisonStrategy permissionStrategy) { + final @NotNull Map> filesFromManifests, + final @Nullable PermissionComparisonStrategy permissionStrategy) { super(filesFromManifests, permissionStrategy); } @Override public boolean hasContentChanged( - @NonNull final FileMetadata previousMetadata, - @NonNull final FileMetadata currentMetadata) { + final @NonNull FileMetadata previousMetadata, + final @NonNull FileMetadata currentMetadata) { final var isContentSource = previousMetadata.getFileType().isContentSource() || currentMetadata.getFileType().isContentSource(); final var hasContentChanged = !Objects.equals(previousMetadata.getFileType(), currentMetadata.getFileType()) || !Objects.equals(previousMetadata.getOriginalSizeBytes(), currentMetadata.getOriginalSizeBytes()) @@ -39,12 +39,11 @@ public boolean hasContentChanged( } @Override - protected Long getPrimaryContentCriteria(@NotNull final FileMetadata metadata) { + protected Long getPrimaryContentCriteria(final @NotNull FileMetadata metadata) { return metadata.getOriginalSizeBytes(); } - @NotNull - private static String getFileName(@NotNull final FileMetadata fileMetadata) { + private static @NotNull String getFileName(final @NotNull FileMetadata fileMetadata) { return 
fileMetadata.getAbsolutePath().getFileName(); } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/BackupJobConfiguration.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/BackupJobConfiguration.java index be9b467..5402073 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/BackupJobConfiguration.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/BackupJobConfiguration.java @@ -40,10 +40,9 @@ public class BackupJobConfiguration { * increment was saved. As a side effect, this property is ignored during the first execution * after each configuration change. */ - @NonNull @EqualsAndHashCode.Exclude @JsonProperty("backup_type") - private final BackupType backupType; + private final @NonNull BackupType backupType; /** * The algorithm used for hash calculations before and after archival. Useful for data * integrity verifications. @@ -51,18 +50,16 @@ public class BackupJobConfiguration { * NOTE: A change of this value requires a {@link BackupType#FULL} backup as the previous * increments cannot use a different hash algorithm. */ - @NonNull @JsonProperty("hash_algorithm") - private final HashAlgorithm hashAlgorithm; + private final @NonNull HashAlgorithm hashAlgorithm; /** * The algorithm used for compression before writing the archived stream to the file system. *

* NOTE: A change of this value requires a {@link BackupType#FULL} backup as the previous * increments cannot use a different hash algorithm. */ - @NonNull @JsonProperty("compression_algorithm") - private final CompressionAlgorithm compression; + private final @NonNull CompressionAlgorithm compression; /** * The public key of an RSA key pair used for encryption. The files will be encrypted using * automatically generated AES keys (DEK) which will be encrypted using the RSA public key @@ -81,45 +78,40 @@ public class BackupJobConfiguration { * NOTE: A change of this value requires a {@link BackupType#FULL} backup as the previous * increments cannot use a different duplicate handling strategy. */ - @NonNull @JsonProperty("duplicate_strategy") - private final DuplicateHandlingStrategy duplicateStrategy; + private final @NonNull DuplicateHandlingStrategy duplicateStrategy; /** * The desired maximum chunk size for the backup archive part. *

* NOTE: Using 0 means that the archive won't be chunked. */ - @Positive @Builder.Default @EqualsAndHashCode.Exclude @JsonProperty("chunk_size_mebibyte") - private final int chunkSizeMebibyte = ONE_HUNDRED_GIBIBYTE; + private final @Positive int chunkSizeMebibyte = ONE_HUNDRED_GIBIBYTE; /** * The prefix of the backup file names. *

* NOTE: A change of this value requires a {@link BackupType#FULL} backup as the previous * increments cannot use a different duplicate handling strategy. */ - @NonNull @FileNamePrefix @JsonProperty("file_name_prefix") - private final String fileNamePrefix; + private final @NonNull String fileNamePrefix; /** * The destination where the backup files will be saved. *

* NOTE: A change of this value requires a {@link BackupType#FULL} backup as the metadata of the * previous increments must be found in the destination in order to calculate changes. */ - @NonNull @JsonProperty("destination_directory") - private final Path destinationDirectory; + private final @NonNull Path destinationDirectory; /** * The source files we want to archive. */ - @Valid - @Size(min = 1) - @NonNull @EqualsAndHashCode.Exclude @JsonProperty("sources") - private final Set sources; + private final @Valid + @Size(min = 1) + @NonNull Set sources; } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/BackupSource.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/BackupSource.java index 1625d2c..2e917fc 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/BackupSource.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/BackupSource.java @@ -37,10 +37,9 @@ public class BackupSource { /** * The path we want to back up. Can be file or directory. */ - @Valid - @NonNull @JsonProperty("path") - private final BackupPath path; + private final @Valid + @NonNull BackupPath path; /** * Optional include patterns for filtering the contents. Uses {@link java.nio.file.PathMatcher} * with "glob" syntax relative to the value of the path field. 
diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTarget.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTarget.java index 3f23bbf..df8786c 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTarget.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTarget.java @@ -20,7 +20,7 @@ public record RestoreTarget(BackupPath backupPath, Path restorePath) { * @param archivedFilePath the original path of a file which was archived during the backup * @return whether the given path matches the backup path */ - public boolean matchesArchivedFile(@NonNull final BackupPath archivedFilePath) { + public boolean matchesArchivedFile(final @NonNull BackupPath archivedFilePath) { return archivedFilePath.equals(backupPath) || archivedFilePath.startsWith(backupPath); } @@ -30,7 +30,7 @@ public boolean matchesArchivedFile(@NonNull final BackupPath archivedFilePath) { * @param filePath the original path * @return the mapped path where the file should be restored to */ - public Path mapBackupPathToRestorePath(@NotNull final BackupPath filePath) { + public Path mapBackupPathToRestorePath(final @NotNull BackupPath filePath) { if (!matchesArchivedFile(filePath)) { throw new IllegalArgumentException("The given path is not a child of the backup path"); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTargets.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTargets.java index 5a17fab..9e07764 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTargets.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTargets.java @@ -20,8 +20,7 @@ public record RestoreTargets(@NonNull Set restoreTargets) { * @param originalPath the original path * @return the restore path */ - @NotNull - public Path 
mapToRestorePath(@NonNull final BackupPath originalPath) { + public @NotNull Path mapToRestorePath(final @NonNull BackupPath originalPath) { return restoreTargets.stream() .filter(restoreTarget -> restoreTarget.matchesArchivedFile(originalPath)) .findFirst() diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTask.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTask.java index 89a31f3..9453633 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTask.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/RestoreTask.java @@ -19,8 +19,7 @@ public class RestoreTask { /** * Defines the target directories to restore files to. */ - @NonNull - private final RestoreTargets restoreTargets; + private final @NonNull RestoreTargets restoreTargets; /** * The number of threads to use for parallel restore. */ diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/enums/DuplicateHandlingStrategy.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/enums/DuplicateHandlingStrategy.java index 597e8d1..a68042a 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/enums/DuplicateHandlingStrategy.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/config/enums/DuplicateHandlingStrategy.java @@ -37,7 +37,7 @@ public Function fileGroupingFunctionForHash(final @NonNull * @param hashAlgorithm the hash algorithm * @return the grouping function */ - public Function fileGroupingFunctionForHash(@NonNull final HashAlgorithm hashAlgorithm) { + public Function fileGroupingFunctionForHash(final @NonNull HashAlgorithm hashAlgorithm) { return fileMetadata -> fileMetadata.getId().toString(); } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionController.java 
b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionController.java index 74fe534..b71ad0b 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionController.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionController.java @@ -4,15 +4,19 @@ import com.github.nagyesta.filebarj.core.common.ManifestManagerImpl; import com.github.nagyesta.filebarj.core.model.BackupIncrementManifest; import com.github.nagyesta.filebarj.core.model.enums.BackupType; +import com.github.nagyesta.filebarj.core.progress.ObservableProgressTracker; +import com.github.nagyesta.filebarj.core.progress.ProgressTracker; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.jetbrains.annotations.Nullable; import java.nio.file.Path; -import java.security.PrivateKey; import java.util.Comparator; +import java.util.List; import java.util.SortedMap; +import static com.github.nagyesta.filebarj.core.progress.ProgressStep.DELETE_OBSOLETE_FILES; +import static com.github.nagyesta.filebarj.core.progress.ProgressStep.LOAD_MANIFESTS; + /** * Controller for the backup increment deletion task. */ @@ -22,22 +26,20 @@ public class IncrementDeletionController { private final SortedMap manifests; private final @NonNull Path backupDirectory; private final ManifestManager manifestManager; + private final ProgressTracker progressTracker; /** * Creates a new instance and initializes it for the specified job. * - * @param backupDirectory the directory where the backup files are located - * @param fileNamePrefix the prefix of the backup file names - * @param kek The key encryption key we want to use to decrypt the files (optional). - * If null, no decryption will be performed. + * @param parameters The parameters. 
*/ public IncrementDeletionController( - @NonNull final Path backupDirectory, - @NonNull final String fileNamePrefix, - @Nullable final PrivateKey kek) { - this.manifestManager = new ManifestManagerImpl(); - this.backupDirectory = backupDirectory; - this.manifests = this.manifestManager.loadAll(this.backupDirectory, fileNamePrefix, kek); + final @NonNull IncrementDeletionParameters parameters) { + this.progressTracker = new ObservableProgressTracker(List.of(LOAD_MANIFESTS, DELETE_OBSOLETE_FILES)); + progressTracker.registerListener(parameters.getProgressListener()); + this.manifestManager = new ManifestManagerImpl(progressTracker); + this.backupDirectory = parameters.getBackupDirectory(); + this.manifests = this.manifestManager.loadAll(this.backupDirectory, parameters.getFileNamePrefix(), parameters.getKek()); } /** @@ -57,11 +59,14 @@ public void deleteIncrementsUntilNextFullBackupAfter(final long startingWithEpoc if (incrementsStartingWithThreshold.get(0).getStartTimeUtcEpochSeconds() != startingWithEpochSeconds) { throw new IllegalArgumentException("Unable to find backup which started at: " + startingWithEpochSeconds); } + progressTracker.estimateStepSubtotal(DELETE_OBSOLETE_FILES, incrementsStartingWithThreshold.size()); for (final var current : incrementsStartingWithThreshold) { if (current.getStartTimeUtcEpochSeconds() > startingWithEpochSeconds && current.getBackupType() == BackupType.FULL) { break; } manifestManager.deleteIncrement(backupDirectory, current); + progressTracker.recordProgressInSubSteps(DELETE_OBSOLETE_FILES); } + progressTracker.completeStep(DELETE_OBSOLETE_FILES); } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionParameters.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionParameters.java new file mode 100644 index 0000000..70e936d --- /dev/null +++ 
b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionParameters.java @@ -0,0 +1,21 @@ +package com.github.nagyesta.filebarj.core.delete; + +import com.github.nagyesta.filebarj.core.progress.LoggingProgressListener; +import com.github.nagyesta.filebarj.core.progress.ProgressListener; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +import java.nio.file.Path; +import java.security.PrivateKey; + +@Data +@Builder +public class IncrementDeletionParameters { + private final @NonNull Path backupDirectory; + private final @NonNull String fileNamePrefix; + private final @Nullable PrivateKey kek; + @Builder.Default + private final @NonNull ProgressListener progressListener = LoggingProgressListener.INSTANCE; +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionController.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionController.java index 96adba8..8ad54aa 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionController.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionController.java @@ -5,14 +5,15 @@ import com.github.nagyesta.filebarj.core.inspect.worker.ManifestToSummaryConverter; import com.github.nagyesta.filebarj.core.inspect.worker.TabSeparatedBackupContentExporter; import com.github.nagyesta.filebarj.core.model.BackupIncrementManifest; +import com.github.nagyesta.filebarj.core.progress.ObservableProgressTracker; +import com.github.nagyesta.filebarj.core.progress.ProgressStep; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.jetbrains.annotations.Nullable; import java.io.PrintStream; import java.nio.file.Path; -import java.security.PrivateKey; import java.time.Instant; +import java.util.List; import 
java.util.SortedMap; /** @@ -26,17 +27,15 @@ public class IncrementInspectionController { /** * Creates a new instance and initializes it for the specified job. * - * @param backupDirectory the directory where the backup files are located - * @param fileNamePrefix the prefix of the backup file names - * @param kek The key encryption key we want to use to decrypt the files (optional). - * If null, no decryption will be performed. + * @param parameters The parameters. */ public IncrementInspectionController( - @NonNull final Path backupDirectory, - @NonNull final String fileNamePrefix, - @Nullable final PrivateKey kek) { - final ManifestManager manifestManager = new ManifestManagerImpl(); - this.manifests = manifestManager.loadAll(backupDirectory, fileNamePrefix, kek); + final @NonNull InspectParameters parameters) { + final var progressTracker = new ObservableProgressTracker(List.of(ProgressStep.LOAD_MANIFESTS)); + progressTracker.registerListener(parameters.getProgressListener()); + final ManifestManager manifestManager = new ManifestManagerImpl(progressTracker); + this.manifests = manifestManager + .loadAll(parameters.getBackupDirectory(), parameters.getFileNamePrefix(), parameters.getKek()); } /** @@ -47,7 +46,7 @@ public IncrementInspectionController( */ public void inspectContent( final long latestStartTimeEpochSeconds, - @NonNull final Path outputFile) { + final @NonNull Path outputFile) { final var selectedUpperLimit = Math.min(Instant.now().getEpochSecond(), latestStartTimeEpochSeconds); final var relevant = this.manifests.headMap(selectedUpperLimit + 1).lastKey(); log.info("Found increment with start timestamp: {}", relevant); @@ -60,7 +59,7 @@ public void inspectContent( * * @param outputStream the output stream */ - public void inspectIncrements(@NonNull final PrintStream outputStream) { + public void inspectIncrements(final @NonNull PrintStream outputStream) { final var manifestToSummaryConverter = new ManifestToSummaryConverter(); 
manifests.forEach((key, value) -> outputStream.println(manifestToSummaryConverter.convertToSummaryString(value))); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/pipeline/InspectParameters.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/pipeline/InspectParameters.java new file mode 100644 index 0000000..371a1d1 --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/pipeline/InspectParameters.java @@ -0,0 +1,21 @@ +package com.github.nagyesta.filebarj.core.inspect.pipeline; + +import com.github.nagyesta.filebarj.core.progress.LoggingProgressListener; +import com.github.nagyesta.filebarj.core.progress.ProgressListener; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +import java.nio.file.Path; +import java.security.PrivateKey; + +@Data +@Builder +public class InspectParameters { + private final @NonNull Path backupDirectory; + private final @NonNull String fileNamePrefix; + private final @Nullable PrivateKey kek; + @Builder.Default + private final @NonNull ProgressListener progressListener = LoggingProgressListener.INSTANCE; +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverter.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverter.java index 47e10b9..55ddea5 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverter.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverter.java @@ -36,8 +36,7 @@ public String convertToSummaryString(final @NonNull BackupIncrementManifest mani + "\tCompression: " + manifest.getConfiguration().getCompression().name(); } - @NotNull - private String getFormattedType(@NotNull final BackupIncrementManifest manifest) { + private 
@NotNull String getFormattedType(final @NotNull BackupIncrementManifest manifest) { if (manifest.getBackupType() == BackupType.INCREMENTAL) { return manifest.getBackupType().name(); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/TabSeparatedBackupContentExporter.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/TabSeparatedBackupContentExporter.java index b732fb3..633626f 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/TabSeparatedBackupContentExporter.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/TabSeparatedBackupContentExporter.java @@ -27,8 +27,8 @@ public class TabSeparatedBackupContentExporter { * @param outputFile The output file. */ public void writeManifestContent( - @NonNull final BackupIncrementManifest manifest, - @NonNull final Path outputFile) { + final @NonNull BackupIncrementManifest manifest, + final @NonNull Path outputFile) { try (var stream = new FileOutputStream(outputFile.toFile()); var buffered = new BufferedOutputStream(stream); var writer = new OutputStreamWriter(buffered, StandardCharsets.UTF_8)) { diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeController.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeController.java index 54f1d89..e512f4a 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeController.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeController.java @@ -5,15 +5,18 @@ import com.github.nagyesta.filebarj.core.common.ManifestManagerImpl; import com.github.nagyesta.filebarj.core.model.*; import com.github.nagyesta.filebarj.core.model.enums.BackupType; +import com.github.nagyesta.filebarj.core.progress.ObservableProgressTracker; +import com.github.nagyesta.filebarj.core.progress.ProgressStep; +import 
com.github.nagyesta.filebarj.core.progress.ProgressTracker; import com.github.nagyesta.filebarj.core.util.LogUtil; import com.github.nagyesta.filebarj.io.stream.BarjCargoArchiveFileInputStreamSource; import com.github.nagyesta.filebarj.io.stream.BarjCargoArchiverFileOutputStream; import com.github.nagyesta.filebarj.io.stream.BarjCargoInputStreamConfiguration; import com.github.nagyesta.filebarj.io.stream.BarjCargoOutputStreamConfiguration; +import com.github.nagyesta.filebarj.io.stream.enums.FileType; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.nio.file.Path; @@ -22,11 +25,14 @@ import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; +import static com.github.nagyesta.filebarj.core.progress.ProgressStep.*; + /** * Controller implementation for the merge process. */ @Slf4j public class MergeController { + private static final List PROGRESS_STEPS = List.of(LOAD_MANIFESTS, MERGE, DELETE_OBSOLETE_FILES); private final ManifestManager manifestManager; private final RestoreManifest mergedManifest; private final SortedMap selectedManifests; @@ -34,33 +40,25 @@ public class MergeController { private final PrivateKey kek; private final Path backupDirectory; private final ReentrantLock executionLock = new ReentrantLock(); + private final ProgressTracker progressTracker; /** * Creates a new instance and initializes it for the merge. * - * @param backupDirectory the directory where the backup files are located - * @param fileNamePrefix the prefix of the backup file names - * @param kek The key encryption key we want to use to decrypt and encrypt - * the files (optional). - * @param rangeStartEpochSeconds the start of the range to merge (inclusive) - * @param rangeEndEpochSeconds the end of the range to merge (inclusive) + * @param mergeParameters The parameters. 
*/ public MergeController( - @NonNull final Path backupDirectory, - @NonNull final String fileNamePrefix, - @Nullable final PrivateKey kek, - final long rangeStartEpochSeconds, - final long rangeEndEpochSeconds) { - if (rangeEndEpochSeconds <= rangeStartEpochSeconds) { - throw new IllegalArgumentException( - "Invalid range selected for merge! start=" + rangeEndEpochSeconds + ", end=" + rangeStartEpochSeconds); - } - this.kek = kek; - this.backupDirectory = backupDirectory; - manifestManager = new ManifestManagerImpl(); + final @NonNull MergeParameters mergeParameters) { + mergeParameters.assertValid(); + this.kek = mergeParameters.getKek(); + this.backupDirectory = mergeParameters.getBackupDirectory(); + this.progressTracker = new ObservableProgressTracker(PROGRESS_STEPS); + progressTracker.registerListener(mergeParameters.getProgressListener()); + manifestManager = new ManifestManagerImpl(progressTracker); log.info("Loading backup manifests for merge from: {}", backupDirectory); - final var manifests = manifestManager.loadAll(this.backupDirectory, fileNamePrefix, kek); - selectedManifests = filterToSelection(manifests, rangeStartEpochSeconds, rangeEndEpochSeconds); + final var manifests = manifestManager.loadAll(this.backupDirectory, mergeParameters.getFileNamePrefix(), kek); + selectedManifests = filterToSelection(manifests, + mergeParameters.getRangeStartEpochSeconds(), mergeParameters.getRangeEndEpochSeconds()); log.info("Selected {} manifests", selectedManifests.size()); manifestsToMerge = keepManifestsSinceLastFullBackupOfTheSelection(selectedManifests); mergedManifest = manifestManager.mergeForRestore(manifestsToMerge); @@ -79,12 +77,19 @@ public MergeController( public BackupIncrementManifest execute(final boolean deleteObsoleteFiles) { executionLock.lock(); try { + progressTracker.reset(); + progressTracker.skipStep(LOAD_MANIFESTS); + if (!deleteObsoleteFiles) { + progressTracker.skipStep(DELETE_OBSOLETE_FILES); + } final var result = mergeBackupContent(); 
manifestManager.persist(result, backupDirectory); if (deleteObsoleteFiles) { log.info("Deleting obsolete files from backup directory: {}", backupDirectory); + progressTracker.estimateStepSubtotal(DELETE_OBSOLETE_FILES, selectedManifests.size()); selectedManifests.values().forEach(manifest -> { manifestManager.deleteIncrement(backupDirectory, manifest); + progressTracker.recordProgressInSubSteps(DELETE_OBSOLETE_FILES); }); } return result; @@ -93,8 +98,7 @@ public BackupIncrementManifest execute(final boolean deleteObsoleteFiles) { } } - @NotNull - private BackupIncrementManifest mergeBackupContent() { + private @NotNull BackupIncrementManifest mergeBackupContent() { final var lastManifest = manifestsToMerge.get(manifestsToMerge.lastKey()); final var firstManifest = manifestsToMerge.get(manifestsToMerge.firstKey()); final var result = BackupIncrementManifest.builder() @@ -111,6 +115,8 @@ private BackupIncrementManifest mergeBackupContent() { .files(mergedManifest.getFilesOfLastManifest()) .archivedEntries(mergedManifest.getArchivedEntriesOfLastManifest()) .build(); + final var totalEntries = (long) result.getArchivedEntries().values().size(); + progressTracker.estimateStepSubtotal(MERGE, totalEntries); final var outputStreamConfiguration = BarjCargoOutputStreamConfiguration.builder() .compressionFunction(result.getConfiguration().getCompression()::decorateOutputStream) .prefix(result.getFileNamePrefix()) @@ -128,6 +134,7 @@ private BackupIncrementManifest mergeBackupContent() { mergeContentEntriesFromManifest(currentManifest, result, output); } output.close(); + progressTracker.completeStep(MERGE); result.setIndexFileName(output.getIndexFileWritten().getFileName().toString()); result.setDataFileNames(output.getDataFilesWritten().stream().map(Path::getFileName).map(Path::toString).toList()); } catch (final IOException e) { @@ -136,9 +143,8 @@ private BackupIncrementManifest mergeBackupContent() { return result; } - @NotNull - private SortedMap filterToSelection( - 
@NotNull final SortedMap manifests, + private @NotNull SortedMap filterToSelection( + final @NotNull SortedMap manifests, final long rangeStartEpochSeconds, final long rangeEndEpochSeconds) { if (!manifests.containsKey(rangeStartEpochSeconds)) { @@ -150,9 +156,8 @@ private SortedMap filterToSelection( return manifests.headMap(rangeEndEpochSeconds + 1).tailMap(rangeStartEpochSeconds); } - @NotNull - private SortedMap keepManifestsSinceLastFullBackupOfTheSelection( - @NotNull final SortedMap selected) { + private @NotNull SortedMap keepManifestsSinceLastFullBackupOfTheSelection( + final @NotNull SortedMap selected) { final SortedMap result = new TreeMap<>(); final var inReverseOrder = selected.values().stream() .sorted(Comparator.comparingLong(BackupIncrementManifest::getStartTimeUtcEpochSeconds).reversed()) @@ -191,6 +196,9 @@ private void mergeContentEntriesFromManifest( final var currentEntry = iterator.next(); if (relevantEntries.contains(currentEntry.getPath())) { output.mergeEntity(currentEntry.getEntityIndex(), currentEntry.getRawContentAndMetadata()); + if (currentEntry.getFileType() != FileType.DIRECTORY) { + progressTracker.recordProgressInSubSteps(MERGE); + } } else { currentEntry.skipContent(); currentEntry.skipMetadata(); @@ -199,8 +207,7 @@ private void mergeContentEntriesFromManifest( } } - @NonNull - private Set filterEntities( + private @NonNull Set filterEntities( final BackupIncrementManifest currentManifest, final BackupIncrementManifest result) { return result.getArchivedEntries().values().stream() diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeParameters.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeParameters.java new file mode 100644 index 0000000..d2f3d22 --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeParameters.java @@ -0,0 +1,30 @@ +package com.github.nagyesta.filebarj.core.merge; + +import 
com.github.nagyesta.filebarj.core.progress.LoggingProgressListener; +import com.github.nagyesta.filebarj.core.progress.ProgressListener; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +import java.nio.file.Path; +import java.security.PrivateKey; + +@Data +@Builder +public class MergeParameters { + private final @NonNull Path backupDirectory; + private final @NonNull String fileNamePrefix; + private final @Nullable PrivateKey kek; + private final long rangeStartEpochSeconds; + private final long rangeEndEpochSeconds; + @Builder.Default + private final @NonNull ProgressListener progressListener = LoggingProgressListener.INSTANCE; + + public void assertValid() { + if (rangeEndEpochSeconds <= rangeStartEpochSeconds) { + throw new IllegalArgumentException( + "Invalid range selected for merge! start=" + rangeEndEpochSeconds + ", end=" + rangeStartEpochSeconds); + } + } +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/AppVersion.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/AppVersion.java index 4bddb0e..5288d87 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/AppVersion.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/AppVersion.java @@ -61,8 +61,7 @@ public String toJsonValue() { return major + "." + minor + "." 
+ patch; } - @NonNull - private static String getDefaultVersion() { + private static @NonNull String getDefaultVersion() { try (var stream = AppVersion.class.getResourceAsStream("/file-barj-component.version"); var reader = new InputStreamReader(Objects.requireNonNull(stream), StandardCharsets.UTF_8); var buffered = new LineNumberReader(reader)) { @@ -72,7 +71,7 @@ private static String getDefaultVersion() { } } - public int compareTo(@NotNull final AppVersion appVersion) { + public int compareTo(final @NotNull AppVersion appVersion) { return Comparator.comparing(AppVersion::major) .thenComparing(AppVersion::minor) .thenComparing(AppVersion::patch) diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/ArchiveEntryLocator.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/ArchiveEntryLocator.java index 550c5b9..cd5776a 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/ArchiveEntryLocator.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/ArchiveEntryLocator.java @@ -28,15 +28,13 @@ public final class ArchiveEntryLocator { /** * The backup increment containing the entry. */ - @PositiveOrZero @JsonProperty("backup_increment") - private final int backupIncrement; + private final @PositiveOrZero int backupIncrement; /** * The name of the entry (file) stored within the archive. */ - @NonNull @JsonProperty("entry_name") - private final UUID entryName; + private final @NonNull UUID entryName; /** * Returns the path to the entry. 
@@ -44,8 +42,7 @@ public final class ArchiveEntryLocator { * @return the path */ @JsonIgnore - @NotNull - public String asEntryPath() { + public @NotNull String asEntryPath() { return String.format("/%s/%s", backupIncrement, entryName); } @@ -55,7 +52,7 @@ public String asEntryPath() { * @param entryPath the path * @return the locator */ - public static ArchiveEntryLocator fromEntryPath(@NonNull final String entryPath) { + public static ArchiveEntryLocator fromEntryPath(final @NonNull String entryPath) { final var matcher = PATH_REGEX.matcher(entryPath); if (matcher.matches()) { return ArchiveEntryLocator.builder() diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/ArchivedFileMetadata.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/ArchivedFileMetadata.java index be5d5ac..59bb562 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/ArchivedFileMetadata.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/ArchivedFileMetadata.java @@ -25,16 +25,14 @@ public class ArchivedFileMetadata { /** * The unique Id of the metadata record. */ - @NonNull @JsonProperty("id") - private final UUID id; + private final @NonNull UUID id; /** * The location where the archived file contents are stored. */ - @Valid - @NonNull @JsonProperty("archive_location") - private final ArchiveEntryLocator archiveLocation; + private final @Valid + @NonNull ArchiveEntryLocator archiveLocation; /** * The hash of the archived content. */ @@ -49,10 +47,9 @@ public class ArchivedFileMetadata { * The Ids of the original files which are archived by the current entry. If multiple Ids are * listed, then duplicates where eliminated. */ - @Size(min = 1) - @NonNull @JsonProperty("files") - private Set files; + private @Size(min = 1) + @NonNull Set files; /** * Copies the metadata except the Id and the files. 
diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/BackupIncrementManifest.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/BackupIncrementManifest.java index e02f37e..cc985bf 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/BackupIncrementManifest.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/BackupIncrementManifest.java @@ -31,74 +31,64 @@ public class BackupIncrementManifest extends EncryptionKeyStore { /** * The version number of the app that generated the manifest. */ - @Valid - @NonNull @JsonProperty("app_version") - private AppVersion appVersion; + private @Valid + @NonNull AppVersion appVersion; /** * The time when the backup process was started in UTC epoch * seconds. */ - @Positive @PastOrPresentEpochSeconds @JsonProperty("start_time_utc_epoch_seconds") - private long startTimeUtcEpochSeconds; + private @Positive long startTimeUtcEpochSeconds; /** * The file name prefix used by the backup archives. */ - @NonNull @FileNamePrefix @JsonProperty("file_name_prefix") - private String fileNamePrefix; + private @NonNull String fileNamePrefix; /** * The type of the backup. */ - @NonNull @JsonProperty("backup_type") - private BackupType backupType; + private @NonNull BackupType backupType; /** * The OS of the backup. */ - @NotNull(groups = ValidationRules.Created.class) - @NotBlank(groups = ValidationRules.Created.class) @JsonProperty("operating_system") - private String operatingSystem; + private @NotNull(groups = ValidationRules.Created.class) + @NotBlank(groups = ValidationRules.Created.class) String operatingSystem; /** * The snapshot of the backup configuration at the time of backup. */ - @Valid - @NonNull @JsonProperty("job_configuration") - private BackupJobConfiguration configuration; + private @Valid + @NonNull BackupJobConfiguration configuration; /** * The map of matching files identified during backup keyed by Id. 
*/ - @Valid - @Size(max = 0, groups = ValidationRules.Created.class) - @Size(min = 1, groups = ValidationRules.Persisted.class) @JsonProperty("files") - private Map files; + private @Valid + @Size(max = 0, groups = ValidationRules.Created.class) + @Size(min = 1, groups = ValidationRules.Persisted.class) Map files; /** * The map of archive entries saved during backup keyed by Id. */ - @Valid - @Size(max = 0, groups = ValidationRules.Created.class) @JsonProperty("archive_entries") - private Map archivedEntries; + private @Valid + @Size(max = 0, groups = ValidationRules.Created.class) Map archivedEntries; /** * The name of the index file. */ - @Null(groups = ValidationRules.Created.class) - @NotNull(groups = ValidationRules.Persisted.class) - @NotBlank(groups = ValidationRules.Persisted.class) @JsonProperty("index_file_name") - private String indexFileName; + private @Null(groups = ValidationRules.Created.class) + @NotNull(groups = ValidationRules.Persisted.class) + @NotBlank(groups = ValidationRules.Persisted.class) String indexFileName; /** * The names of the data files. 
*/ - @Null(groups = ValidationRules.Created.class) - @NotNull(groups = ValidationRules.Persisted.class) - @Size(min = 1, groups = ValidationRules.Persisted.class) @JsonProperty("data_file_names") - private List dataFileNames; + private @Null(groups = ValidationRules.Created.class) + @NotNull(groups = ValidationRules.Persisted.class) + @Size(min = 1, groups = ValidationRules.Persisted.class) List dataFileNames; } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/BackupPath.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/BackupPath.java index 1d1d928..93380b9 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/BackupPath.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/BackupPath.java @@ -32,8 +32,7 @@ public final class BackupPath implements Comparable { private static final Pattern UNIX_FILE_SCHEME = Pattern .compile("^" + FILE_SCHEME_DOUBLE_SLASH + "(?<" + PATH_GROUP + ">/[^:]*)$"); private static final Set PATTERNS = Set.of(WINDOWS_FILE_SCHEME, UNIX_FILE_SCHEME); - @NotBlank - private final String path; + private final @NotBlank String path; /** * Creates a new instance and sets the path. 
@@ -43,7 +42,7 @@ public final class BackupPath implements Comparable { * @throws IllegalArgumentException if the uri does not start with file:// */ @JsonCreator - public static BackupPath fromUri(@NonNull final String uri) { + public static BackupPath fromUri(final @NonNull String uri) { return PATTERNS.stream() .map(p -> { final var matcher = p.matcher(uri); @@ -64,7 +63,7 @@ public static BackupPath fromUri(@NonNull final String uri) { * @param path the absolute OS path * @return the new instance */ - public static BackupPath ofPathAsIs(@NonNull final String path) { + public static BackupPath ofPathAsIs(final @NonNull String path) { return new BackupPath(normalizePath(path)); } @@ -75,7 +74,7 @@ public static BackupPath ofPathAsIs(@NonNull final String path) { * @param suffix the suffix of the path * @return the new instance */ - public static BackupPath of(@NonNull final Path prefix, @NonNull final String suffix) { + public static BackupPath of(final @NonNull Path prefix, final @NonNull String suffix) { return BackupPath.ofPathAsIs(prefix.toAbsolutePath() + UNIX_SEPARATOR + suffix); } @@ -87,7 +86,7 @@ public static BackupPath of(@NonNull final Path prefix, @NonNull final String su * @param suffix the suffix of the path * @return the new instance */ - public static BackupPath of(@NonNull final Path prefix, @NonNull final String middle, @NonNull final String suffix) { + public static BackupPath of(final @NonNull Path prefix, final @NonNull String middle, final @NonNull String suffix) { return BackupPath.of(prefix, middle + UNIX_SEPARATOR + suffix); } @@ -97,7 +96,7 @@ public static BackupPath of(@NonNull final Path prefix, @NonNull final String mi * @param path the OS path * @return the new instance */ - public static BackupPath of(@NonNull final Path path) { + public static BackupPath of(final @NonNull Path path) { return new BackupPath(toStringWithUnixSeparator(path)); } @@ -129,7 +128,7 @@ public Path toOsPath() { } @Override - public int compareTo(@NotNull 
final BackupPath o) { + public int compareTo(final @NotNull BackupPath o) { return PATH_COMPARATOR.compare(this, o); } @@ -186,7 +185,7 @@ public BackupPath getParent() { * @param child the name of the child we want to resolve * @return the resolved path */ - public BackupPath resolve(@NonNull final String child) { + public BackupPath resolve(final @NonNull String child) { return BackupPath.ofPathAsIs(path + UNIX_SEPARATOR + child); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/EncryptionKeyStore.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/EncryptionKeyStore.java index 12bc1be..afda281 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/EncryptionKeyStore.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/EncryptionKeyStore.java @@ -44,11 +44,10 @@ public class EncryptionKeyStore { * by 1. A manifest can contain more numbers if the backup increments were merged (consolidated) * into a single archive. */ - @Valid - @Size(min = 1) - @NonNull @JsonProperty("backup_versions") - private SortedSet<@PositiveOrZero Integer> versions; + private @Valid + @Size(min = 1) + @NonNull SortedSet<@PositiveOrZero Integer> versions; /** * The byte arrays containing the data encryption keys (DEK) encrypted with the key encryption * key (KEK). 
@@ -73,10 +72,9 @@ public class EncryptionKeyStore { * @param version The version of the backup * @return The decrypted DEK */ - @NotNull @JsonIgnore - public SecretKey dataIndexDecryptionKey( - @NonNull final PrivateKey kekPrivateKey, final int version) { + public @NotNull SecretKey dataIndexDecryptionKey( + final @NonNull PrivateKey kekPrivateKey, final int version) { return decryptionKeyByIndex(kekPrivateKey, version, 0); } @@ -88,11 +86,10 @@ public SecretKey dataIndexDecryptionKey( * @param entryLocator The name of the entry inside the archive * @return The decrypted DEK */ - @NotNull @JsonIgnore - public SecretKey dataDecryptionKey( - @NonNull final PrivateKey kekPrivateKey, - @NonNull final ArchiveEntryLocator entryLocator) { + public @NotNull SecretKey dataDecryptionKey( + final @NonNull PrivateKey kekPrivateKey, + final @NonNull ArchiveEntryLocator entryLocator) { final var index = selectKeyIndex(entryLocator.getEntryName()); return decryptionKeyByIndex(kekPrivateKey, entryLocator.getBackupIncrement(), index); } @@ -104,9 +101,8 @@ public SecretKey dataDecryptionKey( * @param kekPublicKey The KEK we will use for encrypting the DEKs. * @return The generated DEKs. 
*/ - @NotNull @JsonIgnore - public Map generateDataEncryptionKeys(@NonNull final PublicKey kekPublicKey) { + public @NotNull Map generateDataEncryptionKeys(final @NonNull PublicKey kekPublicKey) { final Map keys = new HashMap<>(); final Map encodedKeys = new HashMap<>(); for (var i = 0; i < DEK_COUNT; i++) { @@ -131,9 +127,8 @@ public Map generateDataEncryptionKeys(@NonNull final PublicK * * @return The optional key */ - @Nullable @JsonIgnore - public SecretKey dataIndexEncryptionKey() { + public @Nullable SecretKey dataIndexEncryptionKey() { return Optional.ofNullable(rawEncryptionKeys) .map(map -> map.get(versions.first())) .map(map -> map.get(0)) @@ -146,9 +141,8 @@ public SecretKey dataIndexEncryptionKey() { * @param archiveEntryName The name of the entry inside the archive * @return The optional key */ - @Nullable @JsonIgnore - public SecretKey dataEncryptionKey(@NonNull final ArchiveEntryLocator archiveEntryName) { + public @Nullable SecretKey dataEncryptionKey(final @NonNull ArchiveEntryLocator archiveEntryName) { final var index = selectKeyIndex(archiveEntryName.getEntryName()); return Optional.ofNullable(rawEncryptionKeys) .map(map -> map.get(archiveEntryName.getBackupIncrement())) @@ -156,7 +150,7 @@ public SecretKey dataEncryptionKey(@NonNull final ArchiveEntryLocator archiveEnt .orElse(null); } - private SecretKey decryptionKeyByIndex(@NotNull final PrivateKey kekPrivateKey, final int version, final int index) { + private SecretKey decryptionKeyByIndex(final @NotNull PrivateKey kekPrivateKey, final int version, final int index) { if (rawEncryptionKeys == null) { rawEncryptionKeys = new ConcurrentHashMap<>(); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/FileMetadata.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/FileMetadata.java index fba8896..5feebac 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/FileMetadata.java +++ 
b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/FileMetadata.java @@ -32,104 +32,88 @@ public class FileMetadata implements Comparable { /** * The unique Id of the file. */ - @NonNull @JsonProperty("id") - private final UUID id; - @Nullable + private final @NonNull UUID id; @JsonProperty("file_system_key") - private final String fileSystemKey; + private final @Nullable String fileSystemKey; /** * The absolute path where the file is located. */ - @Valid - @NonNull @JsonProperty("path") - private final BackupPath absolutePath; + private final @Valid + @NonNull BackupPath absolutePath; /** * The hash of the file content using the configured hash algorithm. *
* {@link com.github.nagyesta.filebarj.core.config.BackupJobConfiguration#getHashAlgorithm()} */ - @Nullable @JsonProperty("original_hash") - private final String originalHash; + private final @Nullable String originalHash; /** * The original file size. */ - @NotNull - @PositiveOrZero @JsonProperty("original_size") - private Long originalSizeBytes; + private @NotNull + @PositiveOrZero Long originalSizeBytes; /** * The last modified time of the file using UTC epoch seconds. */ - @NotNull @JsonProperty("last_modified_utc_epoch_seconds") - private Long lastModifiedUtcEpochSeconds; + private @NotNull Long lastModifiedUtcEpochSeconds; /** * The last access time of the file using UTC epoch seconds. */ - @NotNull @JsonProperty("last_accessed_utc_epoch_seconds") - private Long lastAccessedUtcEpochSeconds; + private @NotNull Long lastAccessedUtcEpochSeconds; /** * The creation time of the file using UTC epoch seconds. */ - @NotNull @JsonProperty("created_utc_epoch_seconds") - private Long createdUtcEpochSeconds; + private @NotNull Long createdUtcEpochSeconds; /** * The POSIX permissions of the file. */ - @NotNull - @NotBlank - @Pattern(regexp = "^([r-][w-][x-]){3}$") @JsonProperty("permissions") - private final String posixPermissions; + private final @NotNull + @NotBlank + @Pattern(regexp = "^([r-][w-][x-]){3}$") String posixPermissions; /** * The owner of the file. */ - @NotNull - @NotBlank @JsonProperty("owner") - private final String owner; + private final @NotNull + @NotBlank String owner; /** * The owner group of the file. */ - @NotNull - @NotBlank @JsonProperty("group") - private final String group; + private final @NotNull + @NotBlank String group; /** * The file type (file/directory/symbolic link/other). */ - @NonNull @JsonProperty("file_type") - private final FileType fileType; + private final @NonNull FileType fileType; /** * The hidden status of the file. 
*/ - @NotNull @JsonProperty("hidden") - private Boolean hidden; + private @NotNull Boolean hidden; /** * The detected change status of the file. */ - @NonNull @JsonProperty("status") - private Change status; + private @NonNull Change status; /** * The Id of the archive metadata for the entity storing this file. */ - @Nullable @JsonProperty("archive_metadata_id") - private UUID archiveMetadataId; + private @Nullable UUID archiveMetadataId; /** * An optional error message in case of blocker issues during backup. */ - @Nullable @JsonProperty("error") - private String error; + private @Nullable String error; /** * Streams the content of the file. Verifies that the {@link #fileType} is supported by calling @@ -158,7 +142,7 @@ public void assertContentSource() { } @Override - public int compareTo(@NonNull final FileMetadata o) { + public int compareTo(final @NonNull FileMetadata o) { return getAbsolutePath().compareTo(o.getAbsolutePath()); } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/RestoreManifest.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/RestoreManifest.java index 7d41a53..d4711e6 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/RestoreManifest.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/RestoreManifest.java @@ -24,8 +24,7 @@ public class RestoreManifest extends EncryptionKeyStore { /** * The version number of the app that generated the manifest. */ - @NonNull - private final AppVersion maximumAppVersion; + private final @NonNull AppVersion maximumAppVersion; /** * The time when the backup process was started in UTC epoch * seconds. @@ -35,25 +34,20 @@ public class RestoreManifest extends EncryptionKeyStore { * The file name prefix used by the backup archives as keys, mapped to the versions belonging to * that prefix. 
*/ - @NonNull - private final SortedMap> fileNamePrefixes; + private final @NonNull SortedMap> fileNamePrefixes; /** * The snapshot of the backup configuration at the time of backup. */ - @NonNull - private final BackupJobConfiguration configuration; - @NonNull - private final OperatingSystem operatingSystem; + private final @NonNull BackupJobConfiguration configuration; + private final @NonNull OperatingSystem operatingSystem; /** * The map of matching files identified during backup keyed by filename and Id. */ - @NonNull - private final Map> files; + private final @NonNull Map> files; /** * The map of archive entries saved during backup keyed by filename and Id. */ - @NonNull - private final Map> archivedEntries; + private final @NonNull Map> archivedEntries; /** * Returns the data decryption key for the given file name prefix using the private key for @@ -63,7 +57,7 @@ public class RestoreManifest extends EncryptionKeyStore { * @param kekPrivateKey the private key * @return the data decryption key */ - public @NotNull SecretKey dataIndexDecryptionKey(@NotNull final String fileNamePrefix, @NotNull final PrivateKey kekPrivateKey) { + public @NotNull SecretKey dataIndexDecryptionKey(final @NotNull String fileNamePrefix, final @NotNull PrivateKey kekPrivateKey) { return dataIndexDecryptionKey(kekPrivateKey, fileNamePrefixes.get(fileNamePrefix).first()); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/enums/FileType.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/enums/FileType.java index 71b54e4..a3c0de4 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/enums/FileType.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/enums/FileType.java @@ -74,7 +74,7 @@ public InputStream streamContent(final Path path) throws IOException { * @return The file type. 
*/ public static FileType findForAttributes( - @NonNull final BasicFileAttributes attributes) { + final @NonNull BasicFileAttributes attributes) { return Arrays.stream(values()) .filter(f -> f.test.test(attributes)) .findFirst() diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/enums/OperatingSystem.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/enums/OperatingSystem.java index a3ffaa0..f160e20 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/enums/OperatingSystem.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/model/enums/OperatingSystem.java @@ -44,8 +44,7 @@ public enum OperatingSystem { * @param name the OS name * @return the matching constant or UNKNOWN */ - @NotNull - public static OperatingSystem forOsName(@Nullable final String name) { + public static @NotNull OperatingSystem forOsName(final @Nullable String name) { final var osName = Optional.ofNullable(name) .map(String::toLowerCase) .orElse("unknown"); diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/LoggingProgressListener.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/LoggingProgressListener.java new file mode 100644 index 0000000..d002108 --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/LoggingProgressListener.java @@ -0,0 +1,24 @@ +package com.github.nagyesta.filebarj.core.progress; + +import lombok.extern.slf4j.Slf4j; + +import java.util.UUID; + +@Slf4j(topic = "progress") +public class LoggingProgressListener implements ProgressListener { + + /** + * The shared instance of the Logging progress listener. 
+ */ + public static final LoggingProgressListener INSTANCE = new LoggingProgressListener(); + + @Override + public UUID getId() { + return UUID.randomUUID(); + } + + @Override + public void onProgressChanged(final int totalProgressPercentage, final int stepProgressPercentage, final String stepName) { + log.info("({}%) {} step {}% complete.", String.format("%3d", totalProgressPercentage), stepName, stepProgressPercentage); + } +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/NoOpProgressTracker.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/NoOpProgressTracker.java new file mode 100644 index 0000000..e6006a8 --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/NoOpProgressTracker.java @@ -0,0 +1,56 @@ +package com.github.nagyesta.filebarj.core.progress; + +import org.jetbrains.annotations.NotNull; + +public class NoOpProgressTracker implements ProgressTracker { + + /** + * The shared instance of the No-Op tracker. 
+ */ + public static final NoOpProgressTracker INSTANCE = new NoOpProgressTracker(); + + @Override + public void reset() { + //no-op + } + + @Override + public void reset(final @NotNull ProgressStep step) { + //no-op + } + + @Override + public void estimateStepSubtotal(final @NotNull ProgressStep step, final long totalSubSteps) { + //no-op + } + + @Override + public void recordProgressInSubSteps(final @NotNull ProgressStep step, final long progress) { + //no-op + } + + @Override + public void completeStep(final @NotNull ProgressStep step) { + //no-op + } + + @Override + public void skipStep(final @NotNull ProgressStep step) { + //no-op + } + + @Override + public void completeAll() { + //no-op + } + + @Override + public void assertSupports(final @NotNull ProgressStep step) { + //no-op + } + + @Override + public void registerListener(final @NotNull ProgressListener listener) { + + } +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ObservableProgress.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ObservableProgress.java new file mode 100644 index 0000000..801f52f --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ObservableProgress.java @@ -0,0 +1,6 @@ +package com.github.nagyesta.filebarj.core.progress; + +public interface ObservableProgress { + + void registerListener(ProgressListener listener); +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ObservableProgressTracker.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ObservableProgressTracker.java new file mode 100644 index 0000000..713b4c0 --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ObservableProgressTracker.java @@ -0,0 +1,169 @@ +package com.github.nagyesta.filebarj.core.progress; + +import lombok.NonNull; +import org.jetbrains.annotations.NotNull; + +import java.util.List; +import 
java.util.Map; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class ObservableProgressTracker implements ProgressTracker { + + private static final double HUNDRED_PERCENT_DOUBLE = 100.0D; + private static final int COMPLETE = 100; + private static final int NO_PROGRESS = 0; + private static final long SINGLE_STEP = 1L; + + private final List steps; + private final Map subStepTotals; + private final Map listeners = new ConcurrentHashMap<>(); + private final Map completedSubSteps; + private final Map weights; + private final Map lastReportedSubStepPercentage; + private final ReentrantLock lock = new ReentrantLock(); + + public ObservableProgressTracker(final List steps) { + this(steps, steps.stream().collect(Collectors.toMap(Function.identity(), ProgressStep::getDefaultWeight))); + } + + public ObservableProgressTracker(final List steps, final Map weights) { + this.steps = List.copyOf(steps); + this.weights = Map.copyOf(weights); + this.subStepTotals = steps.stream() + .collect(Collectors.toUnmodifiableMap(Function.identity(), step -> new AtomicLong(SINGLE_STEP))); + this.completedSubSteps = steps.stream() + .collect(Collectors.toUnmodifiableMap(Function.identity(), step -> new AtomicLong(NO_PROGRESS))); + this.lastReportedSubStepPercentage = steps.stream() + .collect(Collectors.toUnmodifiableMap(Function.identity(), step -> new AtomicInteger(NO_PROGRESS))); + reset(); + } + + @Override + public void reset() { + this.steps.forEach(this::reset); + } + + @Override + public void reset(final @NotNull ProgressStep step) { + assertSupports(step); + this.subStepTotals.get(step).set(SINGLE_STEP); + this.completedSubSteps.get(step).set(NO_PROGRESS); + this.lastReportedSubStepPercentage.get(step).set(NO_PROGRESS); + } + + 
@Override + public void estimateStepSubtotal(final @NotNull ProgressStep step, final long totalSubSteps) { + assertSupports(step); + subStepTotals.get(step).set(totalSubSteps); + } + + @Override + public void recordProgressInSubSteps(final @NotNull ProgressStep step, final long progress) { + assertSupports(step); + if (progress <= 0) { + throw new IllegalArgumentException("The progress must be greater than zero."); + } + completedSubSteps.get(step).addAndGet(progress); + report(step); + } + + @Override + public void completeStep(final @NotNull ProgressStep step) { + assertSupports(step); + completedSubSteps.get(step).set(subStepTotals.get(step).get()); + report(step); + } + + @Override + public void skipStep(final @NotNull ProgressStep step) { + assertSupports(step); + lastReportedSubStepPercentage.get(step).set(NO_PROGRESS); + completedSubSteps.get(step).set(subStepTotals.get(step).get()); + } + + @Override + public void completeAll() { + steps.forEach(this::completeStep); + } + + @Override + public void registerListener(final @NonNull ProgressListener listener) { + listeners.putIfAbsent(listener.getId(), listener); + } + + @Override + public void assertSupports(final @NonNull ProgressStep step) { + if (!supports(step)) { + throw new IllegalStateException("The " + step.getDisplayName() + " step is not supported."); + } + } + + private boolean supports(final @NotNull ProgressStep step) { + return steps.contains(step); + } + + private int calculateTotalProgress() { + final var totalWeights = weights.values().stream().mapToLong(i -> i * COMPLETE).sum(); + final var totalWeightedProgress = steps.stream() + .mapToLong(step -> ((long) calculateProgress(step)) * weights.get(step)) + .sum(); + return calculatePercentage(totalWeights, totalWeightedProgress); + } + + private void report(final @NotNull ProgressStep step) { + final var subProcessPercentage = calculateProgress(step); + if (lastReportedSubStepPercentage.get(step).get() == subProcessPercentage) { + return; + } + 
try { + lock.lock(); + if (lastReportedSubStepPercentage.get(step).get() == subProcessPercentage) { + return; + } + if (notComplete(subProcessPercentage) + && notOnExactReportFrequency(step, subProcessPercentage) + && progressIsTooLowSinceLastReport(step, subProcessPercentage)) { + return; + } + lastReportedSubStepPercentage.get(step).set(subProcessPercentage); + } finally { + lock.unlock(); + } + final var totalProcess = calculateTotalProgress(); + listeners.forEach((id, listener) -> listener.onProgressChanged(totalProcess, subProcessPercentage, step.getDisplayName())); + } + + private boolean progressIsTooLowSinceLastReport(final @NotNull ProgressStep step, final int subProcessPercentage) { + return lastReportedSubStepPercentage.get(step).get() + step.getReportFrequencyPercent() > subProcessPercentage; + } + + private boolean notOnExactReportFrequency(final @NotNull ProgressStep step, final int subProcessPercentage) { + return subProcessPercentage % step.getReportFrequencyPercent() > 0; + } + + private boolean notComplete(final int subProcessPercentage) { + return subProcessPercentage != COMPLETE; + } + + private int calculateProgress(final @NotNull ProgressStep step) { + final var total = subStepTotals.get(step).get(); + final var done = completedSubSteps.get(step).get(); + return calculatePercentage(total, done); + } + + private int calculatePercentage(final long total, final long done) { + final var percentage = (done * HUNDRED_PERCENT_DOUBLE) / total; + return normalize(percentage); + } + + private int normalize(final double percentage) { + return Math.max(NO_PROGRESS, Math.min(COMPLETE, (int) Math.round(percentage))); + } + +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ProgressListener.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ProgressListener.java new file mode 100644 index 0000000..f0acc0d --- /dev/null +++ 
b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ProgressListener.java @@ -0,0 +1,10 @@ +package com.github.nagyesta.filebarj.core.progress; + +import java.util.UUID; + +public interface ProgressListener { + + UUID getId(); + + void onProgressChanged(int totalProgressPercentage, int stepProgressPercentage, String stepName); +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ProgressStep.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ProgressStep.java new file mode 100644 index 0000000..df09b6d --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ProgressStep.java @@ -0,0 +1,62 @@ +package com.github.nagyesta.filebarj.core.progress; + +import lombok.Getter; + +@Getter +public enum ProgressStep { + /** + * Load manifests for the restore, merge or inspect operation. + */ + LOAD_MANIFESTS("Load manifests", 1, 10), + /** + * Scan the source folders for the files we need to work with. + */ + SCAN_FILES("Scan files", 100, 5), + /** + * Parse the files we have found in the input sources. + */ + PARSE_METADATA("Parse file metadata", 10, 10), + /** + * Backup the change set from the source folder. + */ + BACKUP("Backup", 5, 50), + /** + * Restore directories from the archive. + */ + RESTORE_DIRECTORIES("Restore directories", 25, 10), + /** + * Restore file contents from the archive. + */ + RESTORE_CONTENT("Restore content", 5, 50), + /** + * Restore file metadata from the archive. + */ + RESTORE_METADATA("Restore metadata", 25, 10), + /** + * Merge archive increments. + */ + MERGE("Merge increments", 5, 25), + /** + * Verify the content of the restored files. + */ + VERIFY_CONTENT("Content verification", 10, 10), + /** + * Verify the metadata of the restored files. + */ + VERIFY_METADATA("Metadata verification", 10, 10), + /** + * Delete files which are no longer needed. 
+ */ + DELETE_OBSOLETE_FILES("Obsolete file deletion", 25, 1); + + private final String displayName; + private final int reportFrequencyPercent; + private final int defaultWeight; + + ProgressStep(final String displayName, final int reportFrequencyPercent, final int defaultWeight) { + this.displayName = displayName; + this.reportFrequencyPercent = reportFrequencyPercent; + this.defaultWeight = defaultWeight; + } + +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ProgressTracker.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ProgressTracker.java new file mode 100644 index 0000000..ddbf608 --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/progress/ProgressTracker.java @@ -0,0 +1,26 @@ +package com.github.nagyesta.filebarj.core.progress; + +import org.jetbrains.annotations.NotNull; + +public interface ProgressTracker extends ObservableProgress { + + void reset(); + + void reset(@NotNull ProgressStep step); + + void estimateStepSubtotal(@NotNull ProgressStep step, long totalSubSteps); + + default void recordProgressInSubSteps(final @NotNull ProgressStep step) { + recordProgressInSubSteps(step, 1L); + } + + void recordProgressInSubSteps(@NotNull ProgressStep step, long progress); + + void completeStep(ProgressStep step); + + void skipStep(ProgressStep step); + + void completeAll(); + + void assertSupports(@NotNull ProgressStep step); +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/DryRunRestorePipeline.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/DryRunRestorePipeline.java index 6e98151..00b5265 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/DryRunRestorePipeline.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/DryRunRestorePipeline.java @@ -31,34 +31,34 @@ public class DryRunRestorePipeline 
extends RestorePipeline { * @param restoreTargets the mappings of the root paths where we would like to restore * @param kek the key encryption key we would like to use to decrypt files */ - public DryRunRestorePipeline(@NotNull final RestoreManifest manifest, - @NotNull final Path backupDirectory, - @NotNull final RestoreTargets restoreTargets, - @Nullable final PrivateKey kek) { + public DryRunRestorePipeline(final @NotNull RestoreManifest manifest, + final @NotNull Path backupDirectory, + final @NotNull RestoreTargets restoreTargets, + final @Nullable PrivateKey kek) { super(manifest, backupDirectory, restoreTargets, kek, null); } @Override - public void evaluateRestoreSuccess(@NotNull final List files, @NotNull final ForkJoinPool threadPool) { + public void evaluateRestoreSuccess(final @NotNull List files, final @NotNull ForkJoinPool threadPool) { //no-op } @Override - protected void setFileProperties(@NotNull final FileMetadata fileMetadata) { + protected void setFileProperties(final @NotNull FileMetadata fileMetadata) { log.info("> Set file properties for {}", getRestoreTargets().mapToRestorePath(fileMetadata.getAbsolutePath())); } @Override protected void createSymbolicLink( - @NotNull final Path linkTarget, - @NotNull final Path symbolicLink) throws IOException { + final @NotNull Path linkTarget, + final @NotNull Path symbolicLink) throws IOException { log.info("+ Create symbolic link {} -> {}", symbolicLink, linkTarget); } @Override protected void copyRestoredFileToRemainingLocations( - @NotNull final FileMetadata original, - @NotNull final List remainingCopies) { + final @NotNull FileMetadata original, + final @NotNull List remainingCopies) { final var unpackedPath = getRestoreTargets().mapToRestorePath(original.getAbsolutePath()); remainingCopies.forEach(file -> { final var copy = getRestoreTargets().mapToRestorePath(file.getAbsolutePath()); @@ -72,7 +72,7 @@ protected void copyRestoredFileToRemainingLocations( } @Override - protected void 
createDirectory(@NotNull final Path path) throws IOException { + protected void createDirectory(final @NotNull Path path) throws IOException { if (!Files.exists(path)) { log.info("+ Create directory {}", path); } @@ -80,8 +80,8 @@ protected void createDirectory(@NotNull final Path path) throws IOException { @Override protected void restoreFileContent( - @NotNull final InputStream content, - @NotNull final Path target) { + final @NotNull InputStream content, + final @NotNull Path target) { log.info("+ Create file {}", target); try { content.readAllBytes(); @@ -91,7 +91,7 @@ protected void restoreFileContent( } @Override - protected void deleteIfExists(@NotNull final Path currentFile) { + protected void deleteIfExists(final @NotNull Path currentFile) { if (Files.exists(currentFile)) { if (Files.isDirectory(currentFile)) { log.info("- Delete directory {}", currentFile); diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreController.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreController.java index 8f2d23b..25f36e5 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreController.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreController.java @@ -9,6 +9,9 @@ import com.github.nagyesta.filebarj.core.model.FileMetadata; import com.github.nagyesta.filebarj.core.model.RestoreManifest; import com.github.nagyesta.filebarj.core.model.enums.FileType; +import com.github.nagyesta.filebarj.core.progress.ObservableProgressTracker; +import com.github.nagyesta.filebarj.core.progress.ProgressStep; +import com.github.nagyesta.filebarj.core.progress.ProgressTracker; import com.github.nagyesta.filebarj.core.util.LogUtil; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; @@ -17,9 +20,11 @@ import java.nio.file.Path; import java.security.PrivateKey; +import java.util.List; import 
java.util.concurrent.ForkJoinPool; import java.util.concurrent.locks.ReentrantLock; +import static com.github.nagyesta.filebarj.core.progress.ProgressStep.*; import static com.github.nagyesta.filebarj.core.util.TimerUtil.toProcessSummary; import static com.github.nagyesta.filebarj.io.stream.internal.ChunkingOutputStream.MEBIBYTE; @@ -28,50 +33,35 @@ */ @Slf4j public class RestoreController { + private static final List PROGRESS_STEPS = List.of( + LOAD_MANIFESTS, RESTORE_DIRECTORIES, PARSE_METADATA, RESTORE_CONTENT, + VERIFY_CONTENT, RESTORE_METADATA, VERIFY_METADATA, DELETE_OBSOLETE_FILES); private final RestoreManifest manifest; private final PrivateKey kek; private final Path backupDirectory; private final ReentrantLock executionLock = new ReentrantLock(); private ForkJoinPool threadPool; + private final ProgressTracker progressTracker; /** * Creates a new instance and initializes it for the specified job. * - * @param backupDirectory the directory where the backup files are located - * @param fileNamePrefix the prefix of the backup file names - * @param kek The key encryption key we want to use to decrypt the files (optional). - * If null, no decryption will be performed. + * @param restoreParameters the parameters. */ public RestoreController( - @NotNull final Path backupDirectory, - @NotNull final String fileNamePrefix, - @Nullable final PrivateKey kek) { - this(backupDirectory, fileNamePrefix, kek, Long.MAX_VALUE); - } - - /** - * Creates a new instance and initializes it for the specified job. - * - * @param backupDirectory the directory where the backup files are located - * @param fileNamePrefix the prefix of the backup file names - * @param kek The key encryption key we want to use to decrypt the files (optional). - * If null, no decryption will be performed. - * @param atPointInTime the point in time to restore from (inclusive). 
- */ - public RestoreController( - @NonNull final Path backupDirectory, - @NonNull final String fileNamePrefix, - @Nullable final PrivateKey kek, - final long atPointInTime) { - this.kek = kek; - this.backupDirectory = backupDirectory; - final ManifestManager manifestManager = new ManifestManagerImpl(); + final @NonNull RestoreParameters restoreParameters) { + this.kek = restoreParameters.getKek(); + this.backupDirectory = restoreParameters.getBackupDirectory(); + this.progressTracker = new ObservableProgressTracker(PROGRESS_STEPS); + progressTracker.registerListener(restoreParameters.getProgressListener()); + final ManifestManager manifestManager = new ManifestManagerImpl(progressTracker); log.info("Loading backup manifests for restore from: {}", backupDirectory); - final var manifests = manifestManager.load(backupDirectory, fileNamePrefix, kek, atPointInTime); + final var manifests = manifestManager + .load(backupDirectory, restoreParameters.getFileNamePrefix(), kek, restoreParameters.getAtPointInTime()); final var header = new ManifestToSummaryConverter().convertToSummaryString(manifests.get(manifests.lastKey())); log.info("Latest backup manifest: {}", header); log.info("Merging {} manifests", manifests.size()); - manifest = manifestManager.mergeForRestore(manifests); + this.manifest = manifestManager.mergeForRestore(manifests); final var filesOfLastManifest = manifest.getFilesOfLastManifest(); LogUtil.logStatistics(filesOfLastManifest.values(), (type, count) -> log.info("Found {} {} items in merged backup", count, type)); @@ -83,13 +73,18 @@ public RestoreController( * @param restoreTask the parameters of the task we need to perform when we execute the restore */ public void execute( - @NonNull final RestoreTask restoreTask) { + final @NonNull RestoreTask restoreTask) { if (restoreTask.getThreads() < 1) { throw new IllegalArgumentException("Invalid number of threads: " + restoreTask.getThreads()); } executionLock.lock(); try { this.threadPool = new 
ForkJoinPool(restoreTask.getThreads()); + progressTracker.reset(); + progressTracker.skipStep(LOAD_MANIFESTS); + if (!restoreTask.isDeleteFilesNotInBackup()) { + progressTracker.skipStep(DELETE_OBSOLETE_FILES); + } final var allEntries = manifest.getFilesOfLastManifestFilteredBy(restoreTask.getPathFilter()).values().stream().toList(); final var contentSources = manifest.getExistingContentSourceFilesOfLastManifestFilteredBy(restoreTask.getPathFilter()); final long totalBackupSize = allEntries.stream() @@ -101,6 +96,7 @@ public void execute( final var startTimeMillis = System.currentTimeMillis(); final var pipeline = createRestorePipeline( restoreTask.getRestoreTargets(), restoreTask.isDryRun(), restoreTask.getPermissionComparisonStrategy()); + pipeline.setProgressTracker(progressTracker); pipeline.restoreDirectories(allEntries.stream() .filter(metadata -> metadata.getFileType() == FileType.DIRECTORY) .toList()); @@ -120,11 +116,10 @@ public void execute( } } - @NotNull - private RestorePipeline createRestorePipeline( - @NotNull final RestoreTargets restoreTargets, + private @NotNull RestorePipeline createRestorePipeline( + final @NotNull RestoreTargets restoreTargets, final boolean dryRun, - @Nullable final PermissionComparisonStrategy permissionStrategy) { + final @Nullable PermissionComparisonStrategy permissionStrategy) { final RestorePipeline pipeline; if (dryRun) { pipeline = new DryRunRestorePipeline(manifest, backupDirectory, restoreTargets, kek); @@ -133,5 +128,4 @@ private RestorePipeline createRestorePipeline( } return pipeline; } - } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreParameters.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreParameters.java new file mode 100644 index 0000000..f8cf597 --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreParameters.java @@ -0,0 +1,23 @@ +package 
com.github.nagyesta.filebarj.core.restore.pipeline; + +import com.github.nagyesta.filebarj.core.progress.LoggingProgressListener; +import com.github.nagyesta.filebarj.core.progress.ProgressListener; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +import java.nio.file.Path; +import java.security.PrivateKey; + +@Data +@Builder +public class RestoreParameters { + private final @NonNull Path backupDirectory; + private final @NonNull String fileNamePrefix; + private final @Nullable PrivateKey kek; + @Builder.Default + private final long atPointInTime = Long.MAX_VALUE; + @Builder.Default + private final @NonNull ProgressListener progressListener = LoggingProgressListener.INSTANCE; +} diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestorePipeline.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestorePipeline.java index 00746cb..2a9a1b5 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestorePipeline.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestorePipeline.java @@ -10,6 +10,9 @@ import com.github.nagyesta.filebarj.core.model.*; import com.github.nagyesta.filebarj.core.model.enums.Change; import com.github.nagyesta.filebarj.core.model.enums.FileType; +import com.github.nagyesta.filebarj.core.progress.NoOpProgressTracker; +import com.github.nagyesta.filebarj.core.progress.ProgressStep; +import com.github.nagyesta.filebarj.core.progress.ProgressTracker; import com.github.nagyesta.filebarj.core.restore.worker.FileMetadataSetter; import com.github.nagyesta.filebarj.core.restore.worker.FileMetadataSetterFactory; import com.github.nagyesta.filebarj.core.util.LogUtil; @@ -46,6 +49,7 @@ import java.util.function.Predicate; import java.util.stream.Collectors; +import static com.github.nagyesta.filebarj.core.progress.ProgressStep.*; import 
static com.github.nagyesta.filebarj.core.util.TimerUtil.toProcessSummary; import static com.github.nagyesta.filebarj.io.stream.internal.ChunkingOutputStream.MEBIBYTE; @@ -54,7 +58,6 @@ */ @Slf4j public class RestorePipeline { - private final Map cache = new ConcurrentHashMap<>(); private final FileMetadataChangeDetector changeDetector; private final Path backupDirectory; @@ -63,6 +66,7 @@ public class RestorePipeline { private final RestoreManifest manifest; private final FileMetadataSetter fileMetadataSetter; private final ReentrantLock streamLock = new ReentrantLock(); + private @NonNull ProgressTracker progressTracker = new NoOpProgressTracker(); /** * Creates a new pipeline instance for the specified manifests. @@ -73,11 +77,11 @@ public class RestorePipeline { * @param kek the key encryption key we would like to use to decrypt files * @param permissionStrategy the permission comparison strategy */ - public RestorePipeline(@NonNull final RestoreManifest manifest, - @NonNull final Path backupDirectory, - @NonNull final RestoreTargets restoreTargets, - @Nullable final PrivateKey kek, - @Nullable final PermissionComparisonStrategy permissionStrategy) { + public RestorePipeline(final @NonNull RestoreManifest manifest, + final @NonNull Path backupDirectory, + final @NonNull RestoreTargets restoreTargets, + final @Nullable PrivateKey kek, + final @Nullable PermissionComparisonStrategy permissionStrategy) { if (manifest.getMaximumAppVersion().compareTo(new AppVersion()) > 0) { throw new IllegalArgumentException("Manifests were saved with a newer version of the application"); } @@ -95,12 +99,14 @@ public RestorePipeline(@NonNull final RestoreManifest manifest, * * @param directories the directories */ - public void restoreDirectories(@NonNull final List directories) { + public void restoreDirectories(final @NonNull List directories) { log.info("Restoring {} directories", directories.size()); + progressTracker.estimateStepSubtotal(RESTORE_DIRECTORIES, 
directories.size()); directories.stream() .filter(metadata -> metadata.getFileType() == FileType.DIRECTORY) .sorted(Comparator.comparing(FileMetadata::getAbsolutePath)) .forEachOrdered(this::restoreDirectory); + progressTracker.completeStep(RESTORE_DIRECTORIES); log.info("Restored {} directories", directories.size()); } @@ -111,10 +117,12 @@ public void restoreDirectories(@NonNull final List directories) { * @param threadPool the thread pool we can use for parallel processing */ public void restoreFiles( - @NonNull final Collection contentSources, - @NonNull final ForkJoinPool threadPool) { + final @NonNull Collection contentSources, + final @NonNull ForkJoinPool threadPool) { log.info("Restoring {} items", contentSources.size()); - final var changeStatus = detectChanges(contentSources, threadPool, false); + + progressTracker.estimateStepSubtotal(PARSE_METADATA, contentSources.size()); + final var changeStatus = detectChanges(contentSources, threadPool, false, PARSE_METADATA); final var pathsToRestore = contentSources.stream() .map(FileMetadata::getAbsolutePath) .collect(Collectors.toSet()); @@ -145,23 +153,24 @@ public void restoreFiles( * @param threadPool The thread pool we can use for parallel processing */ public void finalizePermissions( - @NonNull final List files, - @NonNull final ForkJoinPool threadPool) { + final @NonNull List files, + final @NonNull ForkJoinPool threadPool) { log.info("Finalizing metadata for {} files", files.size()); - final var changeStatus = detectChanges(files, threadPool, false); + progressTracker.estimateStepSubtotal(VERIFY_CONTENT, files.size()); + final var changeStatus = detectChanges(files, threadPool, false, VERIFY_CONTENT); final var filesWithMetadataChanges = files.stream() .filter(entry -> changeStatus.get(entry.getAbsolutePath()).isRestoreMetadata()) .toList(); - final var doneCount = new AtomicInteger(); + progressTracker.estimateStepSubtotal(RESTORE_METADATA, filesWithMetadataChanges.size()); 
filesWithMetadataChanges.stream() .sorted(Comparator.comparing(FileMetadata::getAbsolutePath).reversed()) .forEachOrdered(fileMetadata -> { setFileProperties(fileMetadata); - LogUtil.logIfThresholdReached(doneCount.incrementAndGet(), filesWithMetadataChanges.size(), - (done, total) -> log.info("Finalized metadata for {} of {} paths.", done, total)); + progressTracker.recordProgressInSubSteps(RESTORE_METADATA); }); final var totalCount = LogUtil.countsByType(files); final var changedCount = LogUtil.countsByType(filesWithMetadataChanges); + progressTracker.completeStep(RESTORE_METADATA); changedCount.keySet().forEach(type -> log.info("Finalized metadata for {} of {} {} entries.", changedCount.get(type), totalCount.get(type), type)); } @@ -174,7 +183,7 @@ public void finalizePermissions( * @param threadPool The thread pool we can use for parallel processing */ public void deleteLeftOverFiles( - final BackupPath includedPath, final boolean deleteLeftOverFiles, @NonNull final ForkJoinPool threadPool) { + final BackupPath includedPath, final boolean deleteLeftOverFiles, final @NonNull ForkJoinPool threadPool) { if (!deleteLeftOverFiles) { log.info("Skipping left-over files deletion..."); return; @@ -216,6 +225,7 @@ public void deleteLeftOverFiles( throw new ArchivalException("Failed to delete left-over file: " + path, e); } })).join(); + progressTracker.completeStep(DELETE_OBSOLETE_FILES); log.info("Deleted {} left-over files.", counter.get()); } @@ -227,9 +237,10 @@ public void deleteLeftOverFiles( * @param threadPool The thread pool */ public void evaluateRestoreSuccess( - @NonNull final List files, - @NonNull final ForkJoinPool threadPool) { - final var checkOutcome = detectChanges(files, threadPool, true); + final @NonNull List files, + final @NonNull ForkJoinPool threadPool) { + progressTracker.estimateStepSubtotal(VERIFY_METADATA, files.size()); + final var checkOutcome = detectChanges(files, threadPool, true, VERIFY_METADATA); files.stream() //cannot verify 
symbolic links because they can be referencing files from the //restore folder which is not necessarily the same as the original backup folder @@ -249,7 +260,7 @@ public void evaluateRestoreSuccess( * * @param fileMetadata the file metadata */ - protected void setFileProperties(@NotNull final FileMetadata fileMetadata) { + protected void setFileProperties(final @NotNull FileMetadata fileMetadata) { final var restorePath = restoreTargets.mapToRestorePath(fileMetadata.getAbsolutePath()); if (!Files.exists(restorePath, LinkOption.NOFOLLOW_LINKS)) { return; @@ -274,7 +285,7 @@ protected RestoreTargets getRestoreTargets() { * @throws IOException if an I/O error occurs */ protected void createSymbolicLink( - @NotNull final Path linkTarget, @NotNull final Path symbolicLink) throws IOException { + final @NotNull Path linkTarget, final @NotNull Path symbolicLink) throws IOException { Files.createSymbolicLink(symbolicLink, linkTarget); } @@ -285,8 +296,8 @@ protected void createSymbolicLink( * @param remainingCopies the remaining copies we need tp restore */ protected void copyRestoredFileToRemainingLocations( - @NotNull final FileMetadata original, - @NotNull final List remainingCopies) { + final @NotNull FileMetadata original, + final @NotNull List remainingCopies) { final var unpackedFile = restoreTargets.mapToRestorePath(original.getAbsolutePath()); remainingCopies.forEach(file -> { final var copy = restoreTargets.mapToRestorePath(file.getAbsolutePath()); @@ -298,6 +309,7 @@ protected void copyRestoredFileToRemainingLocations( } else { Files.copy(unpackedFile, copy); } + progressTracker.recordProgressInSubSteps(RESTORE_CONTENT); } catch (final IOException e) { throw new ArchivalException("Failed to copy file: " + unpackedFile + " to: " + copy, e); } @@ -310,7 +322,7 @@ protected void copyRestoredFileToRemainingLocations( * @param path the path * @throws IOException if an I/O error occurs */ - protected void createDirectory(@NotNull final Path path) throws IOException { + 
protected void createDirectory(final @NotNull Path path) throws IOException { Files.createDirectories(path); } @@ -320,13 +332,14 @@ protected void createDirectory(@NotNull final Path path) throws IOException { * @param content the content * @param target the target where we need to store the content */ - protected void restoreFileContent(@NotNull final InputStream content, @NotNull final Path target) { + protected void restoreFileContent(final @NotNull InputStream content, final @NotNull Path target) { createParentDirectoryAsFallbackIfMissing(target); try (var outputStream = new FileOutputStream(target.toFile()); var bufferedStream = new BufferedOutputStream(outputStream); var countingStream = new CountingOutputStream(bufferedStream)) { IOUtils.copy(content, countingStream); log.debug("Restored file: {}", target); + progressTracker.recordProgressInSubSteps(RESTORE_CONTENT); } catch (final IOException e) { throw new ArchivalException("Failed to restore content: " + target, e); } @@ -338,7 +351,7 @@ protected void restoreFileContent(@NotNull final InputStream content, @NotNull f * @param currentFile the current file * @throws IOException if an I/O error occurs */ - protected void deleteIfExists(@NotNull final Path currentFile) throws IOException { + protected void deleteIfExists(final @NotNull Path currentFile) throws IOException { if (!Files.exists(currentFile, LinkOption.NOFOLLOW_LINKS)) { return; } @@ -349,7 +362,7 @@ protected void deleteIfExists(@NotNull final Path currentFile) throws IOExceptio } } - private void createParentDirectoryAsFallbackIfMissing(@NotNull final Path target) { + private void createParentDirectoryAsFallbackIfMissing(final @NotNull Path target) { try { if (target.getParent() != null && !Files.exists(target.getParent())) { log.warn("Creating missing parent directory: {}", target.getParent()); @@ -362,7 +375,7 @@ private void createParentDirectoryAsFallbackIfMissing(@NotNull final Path target private void restoreContent( final ForkJoinPool 
threadPool, - @NotNull final RestoreScope restoreScope, + final @NotNull RestoreScope restoreScope, final int totalFiles) { final var changedContentSourcesByPath = restoreScope.getChangedContentSourcesByPath(); final var size = changedContentSourcesByPath.size(); @@ -370,6 +383,7 @@ private void restoreContent( .map(FileMetadata::getOriginalSizeBytes) .mapToLong(Long::longValue) .sum() / MEBIBYTE; + progressTracker.estimateStepSubtotal(RESTORE_CONTENT, size); log.info("Restoring {} entries with content changes ({} MiB).", size, restoreSize); final var linkPaths = new ConcurrentHashMap(); final var contentSourcesInScopeByLocator = restoreScope.getContentSourcesInScopeByLocator(); @@ -425,13 +439,13 @@ private void restoreContent( linkPaths.putAll(resolvedLinks); })).join(); createSymbolicLinks(linkPaths, threadPool); + progressTracker.completeStep(RESTORE_CONTENT); log.info("Restored {} changed entries of {} files", size, totalFiles); } - @NotNull - private List>> partition( - @NotNull final List input, - @NotNull final Map> contentSourcesInScopeByLocator, + private @NotNull List>> partition( + final @NotNull List input, + final @NotNull Map> contentSourcesInScopeByLocator, final int partitions) { final var chunks = new ArrayList>>(); final var threshold = (input.size() / partitions) + 1; @@ -453,10 +467,10 @@ private List>> partition( } private Map restoreMatchingEntriesOfManifest( - @NotNull final RestoreManifest manifest, - @NotNull final RestoreScope restoreScope, - @NotNull final String prefix, - @NotNull final Map> contentSourcesInScopeByLocator) { + final @NotNull RestoreManifest manifest, + final @NotNull RestoreScope restoreScope, + final @NotNull String prefix, + final @NotNull Map> contentSourcesInScopeByLocator) { final var remaining = contentSourcesInScopeByLocator .keySet().stream() .filter(locator -> manifest.getFileNamePrefixes().get(prefix).contains(locator.getBackupIncrement())) @@ -508,14 +522,13 @@ private Map restoreMatchingEntriesOfManifest( } 
} - @NotNull - private Map detectChanges( - @NotNull final Collection files, - @NotNull final ForkJoinPool threadPool, - final boolean ignoreLinks) { + private @NotNull Map detectChanges( + final @NotNull Collection files, + final @NotNull ForkJoinPool threadPool, + final boolean ignoreLinks, + final ProgressStep step) { final var parser = FileMetadataParserFactory.newInstance(); final Map changeStatuses = new ConcurrentHashMap<>(); - final var doneCount = new AtomicInteger(); threadPool.submit(() -> files.parallelStream() .filter(fileMetadata -> !ignoreLinks || fileMetadata.getFileType() != FileType.SYMBOLIC_LINK) .forEach(file -> { @@ -526,17 +539,17 @@ private Map detectChanges( final var restorePath = restoreTargets.mapToRestorePath(file.getAbsolutePath()); final var current = parser.parse(restorePath.toFile(), manifest.getConfiguration()); final var change = changeDetector.classifyChange(previous, current); - LogUtil.logIfThresholdReached(doneCount.incrementAndGet(), files.size(), - (done, total) -> log.info("Parsed {} of {} unique paths.", done, total)); + progressTracker.recordProgressInSubSteps(step); changeStatuses.put(file.getAbsolutePath(), change); })).join(); final var stats = new TreeMap<>(changeStatuses.values().stream() .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()))); log.info("Detected changes: {}", stats); + progressTracker.completeStep(step); return changeStatuses; } - private void createSymbolicLinks(@NotNull final ConcurrentHashMap linkPaths, final ForkJoinPool threadPool) { + private void createSymbolicLinks(final @NotNull ConcurrentHashMap linkPaths, final ForkJoinPool threadPool) { threadPool.submit(() -> linkPaths.entrySet().parallelStream().forEach(entry -> { final var metadata = entry.getKey(); final var linkTarget = entry.getValue(); @@ -546,8 +559,9 @@ private void createSymbolicLinks(@NotNull final ConcurrentHashMap archiveEntryPathsInScope, - @NotNull final SequentialBarjCargoArchiveEntry archiveEntry) { + 
final @NotNull Set archiveEntryPathsInScope, + final @NotNull SequentialBarjCargoArchiveEntry archiveEntry) { final var locator = ArchiveEntryLocator.fromEntryPath(archiveEntry.getPath()); if (locator == null) { return true; @@ -589,8 +603,8 @@ private boolean skipIfNotInScope( } private FileType getSingleEntryType( - @NotNull final Collection files, - @NotNull final UUID archiveEntryId) { + final @NotNull Collection files, + final @NotNull UUID archiveEntryId) { final var types = files.stream() .map(FileMetadata::getFileType) .collect(Collectors.toCollection(TreeSet::new)); @@ -601,9 +615,9 @@ private FileType getSingleEntryType( } private void restoreFileContent( - @NotNull final SequentialBarjCargoArchiveEntry archiveEntry, - @NotNull final FileMetadata fileMetadata, - @Nullable final SecretKey key) { + final @NotNull SequentialBarjCargoArchiveEntry archiveEntry, + final @NotNull FileMetadata fileMetadata, + final @Nullable SecretKey key) { log.debug("Restoring entry: {} to file: {}", archiveEntry.getPath(), fileMetadata.getAbsolutePath()); final var unpackTo = restoreTargets.mapToRestorePath(fileMetadata.getAbsolutePath()); try (var fileContent = archiveEntry.getFileContent(key)) { @@ -614,12 +628,11 @@ private void restoreFileContent( } } - @NotNull - private Path resolveLinkTarget( - @NotNull final SequentialBarjCargoArchiveEntry archiveEntry, - @NotNull final FileMetadata fileMetadata, - @Nullable final SecretKey key, - @NotNull final RestoreScope restoreScope) { + private @NotNull Path resolveLinkTarget( + final @NotNull SequentialBarjCargoArchiveEntry archiveEntry, + final @NotNull FileMetadata fileMetadata, + final @Nullable SecretKey key, + final @NotNull RestoreScope restoreScope) { final var to = restoreTargets.mapToRestorePath(fileMetadata.getAbsolutePath()); try { final var target = FilenameUtils.separatorsToUnix(archiveEntry.getLinkTarget(key)); @@ -638,7 +651,7 @@ private Path resolveLinkTarget( } } - private void skipMetadata(@NotNull final 
SequentialBarjCargoArchiveEntry archiveEntry) { + private void skipMetadata(final @NotNull SequentialBarjCargoArchiveEntry archiveEntry) { try { archiveEntry.skipMetadata(); } catch (final IOException e) { @@ -646,7 +659,7 @@ private void skipMetadata(@NotNull final SequentialBarjCargoArchiveEntry archive } } - private void restoreDirectory(@NotNull final FileMetadata fileMetadata) { + private void restoreDirectory(final @NotNull FileMetadata fileMetadata) { final var path = restoreTargets.mapToRestorePath(fileMetadata.getAbsolutePath()); log.debug("Restoring directory: {}", path); try { @@ -654,6 +667,7 @@ private void restoreDirectory(@NotNull final FileMetadata fileMetadata) { deleteIfExists(path); } createDirectory(path); + progressTracker.recordProgressInSubSteps(RESTORE_DIRECTORIES); log.debug("Restored directory: {}", path); } catch (final IOException e) { throw new ArchivalException("Failed to restore directory: " + path, e); @@ -662,25 +676,23 @@ private void restoreDirectory(@NotNull final FileMetadata fileMetadata) { } private void restoreFileSequentially( - @NotNull final InputStream inputStream, - @NotNull final FileMetadata fileMetadata) { + final @NotNull InputStream inputStream, + final @NotNull FileMetadata fileMetadata) { final var path = restoreTargets.mapToRestorePath(fileMetadata.getAbsolutePath()); restoreFileContent(inputStream, path); } - @Nullable - private SecretKey getDecryptionKey( - @NotNull final RestoreManifest relevantManifest, - @NotNull final ArchiveEntryLocator entryName) { + private @Nullable SecretKey getDecryptionKey( + final @NotNull RestoreManifest relevantManifest, + final @NotNull ArchiveEntryLocator entryName) { return Optional.ofNullable(kek) .map(k -> relevantManifest.dataDecryptionKey(k, entryName)) .orElse(null); } - @NotNull - private BarjCargoArchiveFileInputStreamSource getStreamSource( - @NotNull final RestoreManifest manifest, - @NotNull final String fileNamePrefix) throws IOException { + private @NotNull 
BarjCargoArchiveFileInputStreamSource getStreamSource( + final @NotNull RestoreManifest manifest, + final @NotNull String fileNamePrefix) throws IOException { if (cache.containsKey(fileNamePrefix)) { return cache.get(fileNamePrefix); } @@ -704,4 +716,15 @@ private BarjCargoArchiveFileInputStreamSource getStreamSource( streamLock.unlock(); } } + + public void setProgressTracker(final @NonNull ProgressTracker progressTracker) { + progressTracker.assertSupports(RESTORE_DIRECTORIES); + progressTracker.assertSupports(PARSE_METADATA); + progressTracker.assertSupports(RESTORE_CONTENT); + progressTracker.assertSupports(VERIFY_CONTENT); + progressTracker.assertSupports(RESTORE_METADATA); + progressTracker.assertSupports(VERIFY_METADATA); + progressTracker.assertSupports(DELETE_OBSOLETE_FILES); + this.progressTracker = progressTracker; + } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/FileMetadataSetterFactory.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/FileMetadataSetterFactory.java index 16bd6c6..8005736 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/FileMetadataSetterFactory.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/FileMetadataSetterFactory.java @@ -23,8 +23,8 @@ public class FileMetadataSetterFactory { * @return a metadata setter */ public static FileMetadataSetter newInstance( - @NotNull final RestoreTargets restoreTargets, - @Nullable final PermissionComparisonStrategy permissionStrategy) { + final @NotNull RestoreTargets restoreTargets, + final @Nullable PermissionComparisonStrategy permissionStrategy) { return newInstance(restoreTargets, OsUtil.isWindows(), permissionStrategy); } @@ -37,10 +37,9 @@ public static FileMetadataSetter newInstance( * @param permissionStrategy the permission comparison strategy * @return a metadata setter */ - @NotNull - static PosixFileMetadataSetter 
newInstance( - @NonNull final RestoreTargets restoreTargets, final boolean isWindows, - @Nullable final PermissionComparisonStrategy permissionStrategy) { + static @NotNull PosixFileMetadataSetter newInstance( + final @NonNull RestoreTargets restoreTargets, final boolean isWindows, + final @Nullable PermissionComparisonStrategy permissionStrategy) { if (isWindows) { return new WindowsFileMetadataSetter(restoreTargets, permissionStrategy); } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/PosixFileMetadataSetter.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/PosixFileMetadataSetter.java index e84fa91..c3a48e1 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/PosixFileMetadataSetter.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/PosixFileMetadataSetter.java @@ -44,15 +44,15 @@ public class PosixFileMetadataSetter implements FileMetadataSetter { * @param permissionStrategy the permission comparison strategy */ public PosixFileMetadataSetter( - @NonNull final RestoreTargets restoreTargets, - @Nullable final PermissionComparisonStrategy permissionStrategy) { + final @NonNull RestoreTargets restoreTargets, + final @Nullable PermissionComparisonStrategy permissionStrategy) { this.restoreTargets = restoreTargets; this.permissionComparisonStrategy = Optional.ofNullable(permissionStrategy) .orElse(PermissionComparisonStrategy.STRICT); } @Override - public void setMetadata(@NonNull final FileMetadata metadata) { + public void setMetadata(final @NonNull FileMetadata metadata) { setInitialPermissions(metadata); setHiddenStatus(metadata); setTimestamps(metadata); @@ -61,7 +61,7 @@ public void setMetadata(@NonNull final FileMetadata metadata) { } @Override - public void setInitialPermissions(@NonNull final FileMetadata metadata) { + public void setInitialPermissions(final @NonNull FileMetadata metadata) { if 
(!permissionComparisonStrategy.isPermissionImportant()) { return; } @@ -74,7 +74,7 @@ public void setInitialPermissions(@NonNull final FileMetadata metadata) { } @Override - public void setPermissions(@NonNull final FileMetadata metadata) { + public void setPermissions(final @NonNull FileMetadata metadata) { if (!permissionComparisonStrategy.isPermissionImportant()) { return; } @@ -87,7 +87,7 @@ public void setPermissions(@NonNull final FileMetadata metadata) { } @Override - public void setTimestamps(@NonNull final FileMetadata metadata) { + public void setTimestamps(final @NonNull FileMetadata metadata) { if (metadata.getFileType() == FileType.SYMBOLIC_LINK) { return; } @@ -103,7 +103,7 @@ public void setTimestamps(@NonNull final FileMetadata metadata) { } @Override - public void setOwnerAndGroup(@NonNull final FileMetadata metadata) { + public void setOwnerAndGroup(final @NonNull FileMetadata metadata) { if (!permissionComparisonStrategy.isOwnerImportant()) { return; } @@ -139,8 +139,8 @@ public void setOwnerAndGroup(@NonNull final FileMetadata metadata) { * @param posixFilePermissions the permissions */ protected void doSetPermissions( - @NotNull final Path filePath, - @NotNull final Set posixFilePermissions) { + final @NotNull Path filePath, + final @NotNull Set posixFilePermissions) { performIoTaskAndHandleException(() -> { final var attributeView = getPosixFileAttributeView(filePath); final var currentPermissions = attributeView.readAttributes().permissions(); @@ -172,8 +172,7 @@ protected void performIoTaskAndHandleException(final Callable task) { } } - @NonNull - private PosixFileAttributeView getPosixFileAttributeView(final Path filePath) { + private @NonNull PosixFileAttributeView getPosixFileAttributeView(final Path filePath) { final var attributeView = Files.getFileAttributeView(filePath, PosixFileAttributeView.class, LinkOption.NOFOLLOW_LINKS); if (attributeView == null) { throw new UnsupportedOperationException("POSIX is not supported on the current 
FS/OS"); @@ -181,8 +180,7 @@ private PosixFileAttributeView getPosixFileAttributeView(final Path filePath) { return attributeView; } - @NonNull - private static FileTime fromEpochSeconds(final Long time) { + private static @NonNull FileTime fromEpochSeconds(final Long time) { return FileTime.from(Instant.ofEpochSecond(time)); } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/WindowsFileMetadataSetter.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/WindowsFileMetadataSetter.java index 1561265..5e72f48 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/WindowsFileMetadataSetter.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/restore/worker/WindowsFileMetadataSetter.java @@ -24,21 +24,21 @@ public class WindowsFileMetadataSetter extends PosixFileMetadataSetter { * @param permissionStrategy the permission comparison strategy */ public WindowsFileMetadataSetter( - @NotNull final RestoreTargets restoreTargets, - @Nullable final PermissionComparisonStrategy permissionStrategy) { + final @NotNull RestoreTargets restoreTargets, + final @Nullable PermissionComparisonStrategy permissionStrategy) { super(restoreTargets, permissionStrategy); } @Override - public void setOwnerAndGroup(@NotNull final FileMetadata metadata) { + public void setOwnerAndGroup(final @NotNull FileMetadata metadata) { //no-op } @Override @SuppressWarnings("ResultOfMethodCallIgnored") protected void doSetPermissions( - @NotNull final Path filePath, - @NotNull final Set posixFilePermissions) { + final @NotNull Path filePath, + final @NotNull Set posixFilePermissions) { performIoTaskAndHandleException(() -> { final var file = filePath.toFile(); file.setExecutable(posixFilePermissions.contains(PosixFilePermission.OWNER_EXECUTE)); @@ -49,7 +49,7 @@ protected void doSetPermissions( } @Override - public void setHiddenStatus(@NonNull final FileMetadata metadata) { + 
public void setHiddenStatus(final @NonNull FileMetadata metadata) { if (metadata.getFileType() == FileType.SYMBOLIC_LINK) { return; } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/LogUtil.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/LogUtil.java index 58958c5..07dc0a4 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/LogUtil.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/LogUtil.java @@ -17,7 +17,6 @@ public class LogUtil { private static final String RESET = "\033[0;0m"; private static final String RED = "\033[0;31m"; - private static final int FOUR = 4; /** * Makes the message more prominent by applying a red colour. @@ -36,8 +35,8 @@ public static String scary(final String message) { * @param loggingConsumer the consumer */ public static void logStatistics( - @NotNull final Collection ofFiles, - @NotNull final BiConsumer loggingConsumer) { + final @NotNull Collection ofFiles, + final @NotNull BiConsumer loggingConsumer) { countsByType(ofFiles).forEach(loggingConsumer); } @@ -47,29 +46,9 @@ public static void logStatistics( * @param ofFiles the files * @return the counts of each file type */ - @NotNull - public static TreeMap countsByType( - @NotNull final Collection ofFiles) { + public static @NotNull TreeMap countsByType( + final @NotNull Collection ofFiles) { return new TreeMap<>(ofFiles.stream() .collect(Collectors.groupingBy(FileMetadata::getFileType, Collectors.counting()))); } - - /** - * Log if any of the 25, 50, 75, 100% thresholds are reached. 
- * - * @param done The number of done items - * @param total The total number of items - * @param loggingConsumer The consumer - */ - public static void logIfThresholdReached( - final int done, - final int total, - @NotNull final BiConsumer loggingConsumer) { - final var quarter = total / FOUR; - final var half = quarter + quarter; - final var threeQuarters = half + quarter; - if (done == quarter || done == half || done == threeQuarters || done == total) { - loggingConsumer.accept(done, total); - } - } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/OsUtil.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/OsUtil.java index b8fbef2..0be72a6 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/OsUtil.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/OsUtil.java @@ -25,8 +25,7 @@ public static boolean isWindows() { * * @return the current OS */ - @NotNull - public static OperatingSystem getOs() { + public static @NotNull OperatingSystem getOs() { return OperatingSystem.forOsName(getRawOsName()); } @@ -36,8 +35,7 @@ public static OperatingSystem getOs() { * * @return the raw OS name */ - @NotNull - public static String getRawOsName() { + public static @NotNull String getRawOsName() { return System.getProperty("os.name"); } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/TimerUtil.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/TimerUtil.java index 47810e4..0e785fe 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/TimerUtil.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/util/TimerUtil.java @@ -24,8 +24,7 @@ public final class TimerUtil { * @param totalBackupSizeBytes Total backup size in bytes * @return Process summary */ - @NotNull - public static String toProcessSummary(final long durationMillis, final long totalBackupSizeBytes) { + 
public static @NotNull String toProcessSummary(final long durationMillis, final long totalBackupSizeBytes) { final var elapsedSeconds = toElapsedSeconds(durationMillis); final var elapsedMinutes = toElapsedMinutes(elapsedSeconds); final var remainingSeconds = getRemainderSecondsOfIncompleteMinutes(elapsedSeconds); @@ -39,13 +38,11 @@ public static String toProcessSummary(final long durationMillis, final long tota * @param durationMillis Duration in milliseconds * @return Process summary */ - @NotNull - public static String toProcessSummary(final long durationMillis) { + public static @NotNull String toProcessSummary(final long durationMillis) { return toProcessSummary(durationMillis, 0L); } - @NotNull - private static String rateToString(final long totalBackupSizeBytes, final BigDecimal elapsedMinutes) { + private static @NotNull String rateToString(final long totalBackupSizeBytes, final BigDecimal elapsedMinutes) { if (totalBackupSizeBytes == 0 || isZero(elapsedMinutes)) { return ""; } @@ -53,13 +50,11 @@ private static String rateToString(final long totalBackupSizeBytes, final BigDec return " (speed: " + rate.toPlainString() + " MiB/min)"; } - @NotNull - private static String secondsToString(final BigDecimal remainingSeconds) { + private static @NotNull String secondsToString(final BigDecimal remainingSeconds) { return remainingSeconds.toPlainString() + " seconds"; } - @NotNull - private static String minutesToString(final BigDecimal elapsedMinutes) { + private static @NotNull String minutesToString(final BigDecimal elapsedMinutes) { if (isZero(elapsedMinutes)) { return ""; } @@ -70,20 +65,17 @@ private static boolean isZero(final BigDecimal elapsedMinutes) { return elapsedMinutes.setScale(0, RoundingMode.DOWN).compareTo(BigDecimal.ZERO) == 0; } - @NotNull - private static BigDecimal getRemainderSecondsOfIncompleteMinutes( + private static @NotNull BigDecimal getRemainderSecondsOfIncompleteMinutes( final BigDecimal elapsedSeconds) { return 
elapsedSeconds.remainder(MINUTES); } - @NotNull - private static BigDecimal toElapsedMinutes( + private static @NotNull BigDecimal toElapsedMinutes( final BigDecimal elapsedSeconds) { return elapsedSeconds.divide(MINUTES, RoundingMode.HALF_UP); } - @NotNull - private static BigDecimal toElapsedSeconds( + private static @NotNull BigDecimal toElapsedSeconds( final long durationMillis) { return new BigDecimal(durationMillis).setScale(2, RoundingMode.HALF_UP) .divide(SECONDS, 2, RoundingMode.HALF_UP); diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupControllerIntegrationTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupControllerIntegrationTest.java index 19430cc..2ea968c 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupControllerIntegrationTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupControllerIntegrationTest.java @@ -57,7 +57,11 @@ void testExecuteShouldCreateValidBackupWhenCalledWithKnownInputData(final int th Files.createSymbolicLink(imageLink.toPath(), exampleFile.toPath()); final var job = getConfiguration(BackupType.FULL, keyPair, sourceDirectory, KEEP_EACH); - final var underTest = new BackupController(job, false); + final var parameters = BackupParameters.builder() + .job(job) + .forceFull(false) + .build(); + final var underTest = new BackupController(parameters); //when underTest.execute(threads); @@ -135,7 +139,11 @@ void testExecuteShouldCollapseDuplicatesWhenInputDataHasDuplicates(final int thr Files.createSymbolicLink(imageLink.toPath(), exampleFile.toPath()); final var job = getConfiguration(BackupType.FULL, keyPair, sourceDirectory, KEEP_ONE_PER_BACKUP); - final var underTest = new BackupController(job, false); + final var parameters = BackupParameters.builder() + .job(job) + .forceFull(false) + .build(); + final var underTest = new 
BackupController(parameters); //when underTest.execute(threads); @@ -207,7 +215,11 @@ void testExecuteShouldThrowExceptionWhenCalledWithZeroOrLess(final int threads) Files.createSymbolicLink(imageLink.toPath(), exampleFile.toPath()); final var keyPair = EncryptionUtil.generateRsaKeyPair(); final var job = getConfiguration(BackupType.FULL, keyPair, sourceDirectory, KEEP_EACH); - final var underTest = new BackupController(job, false); + final var parameters = BackupParameters.builder() + .job(job) + .forceFull(false) + .build(); + final var underTest = new BackupController(parameters); //when Assertions.assertThrows(IllegalArgumentException.class, () -> underTest.execute(threads)); @@ -226,7 +238,11 @@ void testExecuteShouldThrowExceptionWhenCalledTwiceOnTheSameInstance() throws IO Files.createSymbolicLink(imageLink.toPath(), exampleFile.toPath()); final var keyPair = EncryptionUtil.generateRsaKeyPair(); final var job = getConfiguration(BackupType.FULL, keyPair, sourceDirectory, KEEP_EACH); - final var underTest = new BackupController(job, false); + final var parameters = BackupParameters.builder() + .job(job) + .forceFull(false) + .build(); + final var underTest = new BackupController(parameters); //when underTest.execute(1); @@ -241,9 +257,13 @@ void testConstructorShouldDefaultToFullBackupWhenCalledWithZeroManifests() { final var sourceDirectory = Path.of(testDataRoot.toString(), "source"); final var keyPair = EncryptionUtil.generateRsaKeyPair(); final var job = getConfiguration(BackupType.INCREMENTAL, keyPair, sourceDirectory, KEEP_EACH); + final var parameters = BackupParameters.builder() + .job(job) + .forceFull(false) + .build(); //when - final var underTest = new BackupController(job, false); + final var underTest = new BackupController(parameters); final var actual = underTest.getManifest().getBackupType(); //then diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupControllerTest.java 
b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupControllerTest.java index 064ff02..72c0eac 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupControllerTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/BackupControllerTest.java @@ -23,7 +23,20 @@ void testConstructorShouldThrowExceptionWhenCalledWithNull() { //given //when - Assertions.assertThrows(IllegalArgumentException.class, () -> new BackupController(null, false)); + Assertions.assertThrows(IllegalArgumentException.class, + () -> new BackupController(null)); + + //then + exception + } + + @SuppressWarnings("DataFlowIssue") + @Test + void testBuilderShouldThrowExceptionWhenCalledWithNullJob() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> BackupParameters.builder().job(null)); //then + exception } @@ -45,9 +58,13 @@ void testConstructorShouldGenerateManifestWhenCalledWithValidInput() { .encryptionKey(keyPair.getPublic()) .sources(Set.of(backupSource)) .build(); + final var parameters = BackupParameters.builder() + .job(job) + .forceFull(false) + .build(); //when - final var underTest = new BackupController(job, false); + final var underTest = new BackupController(parameters); final var actual = underTest.getManifest(); //then diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/ParallelBackupPipelineTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/ParallelBackupPipelineTest.java index eee496b..be5221e 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/ParallelBackupPipelineTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/backup/pipeline/ParallelBackupPipelineTest.java @@ -90,7 +90,11 @@ private BackupIncrementManifest getManifest() { .encryptionKey(keyPair.getPublic()) 
.sources(Set.of(backupSource)) .build(); - final var backupController = new BackupController(job, false); + final var parameters = BackupParameters.builder() + .job(job) + .forceFull(false) + .build(); + final var backupController = new BackupController(parameters); return backupController.getManifest(); } } diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplIntegrationTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplIntegrationTest.java index a0e3862..64569fe 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplIntegrationTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplIntegrationTest.java @@ -12,6 +12,7 @@ import com.github.nagyesta.filebarj.core.model.ArchivedFileMetadata; import com.github.nagyesta.filebarj.core.model.BackupPath; import com.github.nagyesta.filebarj.core.model.enums.BackupType; +import com.github.nagyesta.filebarj.core.progress.NoOpProgressTracker; import org.apache.commons.io.FileUtils; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -30,7 +31,7 @@ public class ManifestManagerImplIntegrationTest extends TempFileAwareTest { @Test void testMergeForRestoreShouldKeepLatestFileSetWhenCalledWithValidIncrementalData() throws IOException, InterruptedException { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var destinationDirectory = testDataRoot.resolve("destination"); final var source = testDataRoot.resolve("source"); final var config = BackupJobConfiguration.builder() diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplTest.java index 06354a2..12fc259 100644 --- 
a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplTest.java @@ -12,6 +12,7 @@ import com.github.nagyesta.filebarj.core.model.BackupPath; import com.github.nagyesta.filebarj.core.model.ValidationRules; import com.github.nagyesta.filebarj.core.model.enums.BackupType; +import com.github.nagyesta.filebarj.core.progress.NoOpProgressTracker; import com.github.nagyesta.filebarj.io.stream.crypto.EncryptionUtil; import jakarta.validation.ValidationException; import org.junit.jupiter.api.Assertions; @@ -42,7 +43,7 @@ class ManifestManagerImplTest extends TempFileAwareTest { @Test void testGenerateManifestShouldAllowOverridingTheBackupTypeWhenCalledWithFullBackupOfIncrementalConfiguration() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when final var actual = underTest.generateManifest(configuration, BackupType.FULL, 0); @@ -57,7 +58,7 @@ void testGenerateManifestShouldAllowOverridingTheBackupTypeWhenCalledWithFullBac @Test void testGenerateManifestShouldThrowExceptionWhenCalledWithNullConfiguration() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, @@ -70,7 +71,7 @@ void testGenerateManifestShouldThrowExceptionWhenCalledWithNullConfiguration() { @Test void testGenerateManifestShouldThrowExceptionWhenCalledWithNullBackupType() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, @@ -82,7 +83,7 @@ void testGenerateManifestShouldThrowExceptionWhenCalledWithNullBackupType() { @Test void 
testLoadShouldReadPreviouslyPersistedManifestWhenUsingEncryption() throws IOException { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var keyPair = EncryptionUtil.generateRsaKeyPair(); final var destinationDirectory = testDataRoot.resolve("destination"); final var config = BackupJobConfiguration.builder() @@ -117,7 +118,7 @@ void testLoadShouldReadPreviouslyPersistedManifestWhenUsingEncryption() throws I @Test void testLoadShouldReadPreviouslyPersistedManifestWhenNotUsingEncryption() throws IOException { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var destinationDirectory = testDataRoot.resolve("destination"); final var config = BackupJobConfiguration.builder() .fileNamePrefix("prefix") @@ -151,7 +152,7 @@ void testLoadShouldReadPreviouslyPersistedManifestWhenNotUsingEncryption() throw void testLoadShouldFilterOutManifestsAfterThresholdWhenATimeStampIsProvided() throws IOException, InterruptedException { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var destinationDirectory = testDataRoot.resolve("destination"); final var config = BackupJobConfiguration.builder() .fileNamePrefix("prefix") @@ -191,7 +192,7 @@ void testLoadShouldFilterOutManifestsAfterThresholdWhenATimeStampIsProvided() void testLoadShouldFilterOutManifestsBeforeLatestFullBackupWhenMultipleFullBackupsAreEligible() throws IOException, InterruptedException { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var destinationDirectory = testDataRoot.resolve("destination"); final var config = BackupJobConfiguration.builder() .fileNamePrefix("prefix") @@ -228,7 +229,7 @@ void 
testLoadShouldFilterOutManifestsBeforeLatestFullBackupWhenMultipleFullBacku @Test void testLoadShouldThrowExceptionWhenAPreviousVersionIsMissing() throws InterruptedException { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var destinationDirectory = testDataRoot.resolve("destination"); final var config = BackupJobConfiguration.builder() .fileNamePrefix("prefix") @@ -259,7 +260,7 @@ void testLoadShouldThrowExceptionWhenAPreviousVersionIsMissing() throws Interrup @Test void testPersistShouldThrowExceptionWhenCalledWithNull() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, () -> underTest.persist(null)); @@ -271,7 +272,7 @@ void testPersistShouldThrowExceptionWhenCalledWithNull() { @Test void testPersistShouldThrowExceptionWhenCalledWithNullManifest() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, () -> underTest.persist(null, Path.of("destination"))); @@ -283,7 +284,7 @@ void testPersistShouldThrowExceptionWhenCalledWithNullManifest() { @Test void testPersistShouldThrowExceptionWhenCalledWithNullDestination() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, @@ -296,7 +297,7 @@ void testPersistShouldThrowExceptionWhenCalledWithNullDestination() { @Test void testLoadShouldThrowExceptionWhenCalledWithNullDirectory() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var destinationDirectory = 
testDataRoot.resolve("destination"); final var config = BackupJobConfiguration.builder() .fileNamePrefix("prefix") @@ -323,7 +324,7 @@ void testLoadShouldThrowExceptionWhenCalledWithNullDirectory() { @Test void testLoadShouldThrowExceptionWhenCalledWithNullPrefix() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var destinationDirectory = testDataRoot.resolve("destination"); final var config = BackupJobConfiguration.builder() .fileNamePrefix("prefix") @@ -350,7 +351,7 @@ void testLoadShouldThrowExceptionWhenCalledWithNullPrefix() { @Test void testValidateShouldThrowExceptionWhenCalledWithNullManifest() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, @@ -363,7 +364,7 @@ void testValidateShouldThrowExceptionWhenCalledWithNullManifest() { @Test void testValidateShouldThrowExceptionWhenCalledWithNullRules() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var destinationDirectory = testDataRoot.resolve("destination"); final var config = BackupJobConfiguration.builder() .fileNamePrefix("prefix") @@ -387,7 +388,7 @@ void testValidateShouldThrowExceptionWhenCalledWithNullRules() { @Test void testValidateShouldThrowExceptionWhenCalledWithInvalidData() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); final var destinationDirectory = testDataRoot.resolve("destination"); final var config = BackupJobConfiguration.builder() .fileNamePrefix("prefix") @@ -412,7 +413,7 @@ void testValidateShouldThrowExceptionWhenCalledWithInvalidData() { @Test void testMergeForRestoreShouldThrowExceptionWhenCalledWithNull() { //given - final var underTest = new 
ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, @@ -425,7 +426,7 @@ void testMergeForRestoreShouldThrowExceptionWhenCalledWithNull() { @Test void testDeleteIncrementShouldThrowExceptionWhenCalledWithNullManifest() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, () -> underTest.deleteIncrement(Path.of("destination"), null)); @@ -437,7 +438,7 @@ void testDeleteIncrementShouldThrowExceptionWhenCalledWithNullManifest() { @Test void testDeleteIncrementShouldThrowExceptionWhenCalledWithNullDestination() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, @@ -450,7 +451,7 @@ void testDeleteIncrementShouldThrowExceptionWhenCalledWithNullDestination() { @Test void testLoadPreviousManifestsForBackupShouldThrowExceptionWhenCalledWithNull() { //given - final var underTest = new ManifestManagerImpl(); + final var underTest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE); //when Assertions.assertThrows(IllegalArgumentException.class, @@ -459,6 +460,17 @@ void testLoadPreviousManifestsForBackupShouldThrowExceptionWhenCalledWithNull() //then + exception } + @SuppressWarnings("DataFlowIssue") + @Test + void testConstructorShouldThrowExceptionWhenCalledWithNull() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, () -> new ManifestManagerImpl(null)); + + //then + exception + } + private void simulateThatADirectoryWasArchived(final BackupIncrementManifest expected) { expected.setIndexFileName("index"); expected.setDataFileNames(List.of("data")); diff --git 
a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionControllerTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionControllerTest.java index c10d826..7fa835d 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionControllerTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/delete/IncrementDeletionControllerTest.java @@ -2,6 +2,7 @@ import com.github.nagyesta.filebarj.core.TempFileAwareTest; import com.github.nagyesta.filebarj.core.backup.pipeline.BackupController; +import com.github.nagyesta.filebarj.core.backup.pipeline.BackupParameters; import com.github.nagyesta.filebarj.core.config.BackupJobConfiguration; import com.github.nagyesta.filebarj.core.config.BackupSource; import com.github.nagyesta.filebarj.core.config.enums.CompressionAlgorithm; @@ -44,24 +45,34 @@ public Stream validParameterProvider() { @SuppressWarnings("DataFlowIssue") @Test - void testConstructorShouldThrowExceptionWhenCalledWithNullBackupDirectory() { + void testBuilderShouldThrowExceptionWhenCalledWithNullBackupDirectory() { //given - final var fileNamePrefix = "prefix"; //when - assertThrows(IllegalArgumentException.class, () -> new IncrementDeletionController(null, fileNamePrefix, null)); + assertThrows(IllegalArgumentException.class, + () -> IncrementDeletionParameters.builder().backupDirectory(null)); //then + exception } @SuppressWarnings("DataFlowIssue") @Test - void testConstructorShouldThrowExceptionWhenCalledWithNullPrefix() { + void testBuilderShouldThrowExceptionWhenCalledWithNullPrefix() { //given - final var backupDirectory = testDataRoot; //when - assertThrows(IllegalArgumentException.class, () -> new IncrementDeletionController(backupDirectory, null, null)); + assertThrows(IllegalArgumentException.class, () -> IncrementDeletionParameters.builder().fileNamePrefix(null)); + + //then + exception + } + + 
@SuppressWarnings("DataFlowIssue") + @Test + void testConstructorShouldThrowExceptionWhenCalledWithNull() { + //given + + //when + assertThrows(IllegalArgumentException.class, () -> new IncrementDeletionController(null)); //then + exception } @@ -82,7 +93,12 @@ void testDeleteIncrementsShouldReturnSummariesWhenCalledWithStream( doBackup(backupDirectory, originalDirectory, prefix); Thread.sleep(ONE_SECOND); } - final var underTest = new IncrementDeletionController(backupDirectory, prefix, null); + final var parameters = IncrementDeletionParameters.builder() + .backupDirectory(backupDirectory) + .fileNamePrefix(prefix) + .kek(null) + .build(); + final var underTest = new IncrementDeletionController(parameters); final var firstBackupStarted = Arrays.stream(Objects.requireNonNull(backupDirectory.toFile().list())) .filter(child -> child.startsWith(prefix) && child.endsWith(".manifest.cargo")) .sorted() @@ -121,6 +137,10 @@ private static void doBackup( .compression(CompressionAlgorithm.NONE) .duplicateStrategy(DuplicateHandlingStrategy.KEEP_ONE_PER_BACKUP) .build(); - new BackupController(configuration, false).execute(1); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(false) + .build(); + new BackupController(parameters).execute(1); } } diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionControllerTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionControllerTest.java index 8802009..c93ab29 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionControllerTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionControllerTest.java @@ -2,6 +2,7 @@ import com.github.nagyesta.filebarj.core.TempFileAwareTest; import com.github.nagyesta.filebarj.core.backup.pipeline.BackupController; +import 
com.github.nagyesta.filebarj.core.backup.pipeline.BackupParameters; import com.github.nagyesta.filebarj.core.config.BackupJobConfiguration; import com.github.nagyesta.filebarj.core.config.BackupSource; import com.github.nagyesta.filebarj.core.config.enums.CompressionAlgorithm; @@ -30,24 +31,33 @@ class IncrementInspectionControllerTest extends TempFileAwareTest { @SuppressWarnings("DataFlowIssue") @Test - void testConstructorShouldThrowExceptionWhenCalledWithNullBackupDirectory() { + void testConstructorShouldThrowExceptionWhenCalledWithNull() { //given - final var fileNamePrefix = "prefix"; //when - assertThrows(IllegalArgumentException.class, () -> new IncrementInspectionController(null, fileNamePrefix, null)); + assertThrows(IllegalArgumentException.class, () -> new IncrementInspectionController(null)); //then + exception } @SuppressWarnings("DataFlowIssue") @Test - void testConstructorShouldThrowExceptionWhenCalledWithNullPrefix() { + void testBuilderShouldThrowExceptionWhenCalledWithNullBackupDirectory() { //given - final var backupDirectory = testDataRoot; //when - assertThrows(IllegalArgumentException.class, () -> new IncrementInspectionController(backupDirectory, null, null)); + assertThrows(IllegalArgumentException.class, () -> InspectParameters.builder().backupDirectory(null)); + + //then + exception + } + + @SuppressWarnings("DataFlowIssue") + @Test + void testBuilderShouldThrowExceptionWhenCalledWithNullPrefix() { + //given + + //when + assertThrows(IllegalArgumentException.class, () -> InspectParameters.builder().fileNamePrefix(null)); //then + exception } @@ -61,7 +71,12 @@ void testInspectContentShouldThrowExceptionWhenCalledWithNullOutputFile() throws Files.createDirectories(originalDirectory); Files.writeString(originalDirectory.resolve("file1.txt"), "content"); doBackup(backupDirectory, originalDirectory, "prefix"); - final var underTest = new IncrementInspectionController(backupDirectory, "prefix", null); + final var parameters = 
InspectParameters.builder() + .backupDirectory(backupDirectory) + .fileNamePrefix("prefix") + .kek(null) + .build(); + final var underTest = new IncrementInspectionController(parameters); //when assertThrows(IllegalArgumentException.class, () -> underTest.inspectContent(Long.MAX_VALUE, null)); @@ -79,7 +94,12 @@ void testInspectContentShouldWriteContentWhenCalledWithNullOutputFile() throws I Files.writeString(originalFile, "content"); final var prefix = "file-prefix"; doBackup(backupDirectory, originalDirectory, prefix); - final var underTest = new IncrementInspectionController(backupDirectory, prefix, null); + final var parameters = InspectParameters.builder() + .backupDirectory(backupDirectory) + .fileNamePrefix(prefix) + .kek(null) + .build(); + final var underTest = new IncrementInspectionController(parameters); final var outputFile = originalDirectory.resolve("content.csv"); //when @@ -103,7 +123,12 @@ void testInspectIncrementsShouldThrowExceptionWhenCalledWithNull() throws IOExce Files.createDirectories(originalDirectory); Files.writeString(originalDirectory.resolve("file1.txt"), "content"); doBackup(backupDirectory, originalDirectory, "prefix"); - final var underTest = new IncrementInspectionController(backupDirectory, "prefix", null); + final var parameters = InspectParameters.builder() + .backupDirectory(backupDirectory) + .fileNamePrefix("prefix") + .kek(null) + .build(); + final var underTest = new IncrementInspectionController(parameters); //when assertThrows(IllegalArgumentException.class, () -> underTest.inspectIncrements(null)); @@ -123,7 +148,12 @@ void testInspectIncrementsShouldReturnSummariesWhenCalledWithStream() throws IOE doBackup(backupDirectory, originalDirectory, prefix); Thread.sleep(ONE_SECOND); } - final var underTest = new IncrementInspectionController(backupDirectory, prefix, null); + final var parameters = InspectParameters.builder() + .backupDirectory(backupDirectory) + .fileNamePrefix(prefix) + .kek(null) + .build(); + final var 
underTest = new IncrementInspectionController(parameters); final var byteArrayOutputStream = new ByteArrayOutputStream(); final var printStream = new PrintStream(byteArrayOutputStream); @@ -152,6 +182,10 @@ private static void doBackup( .compression(CompressionAlgorithm.NONE) .duplicateStrategy(DuplicateHandlingStrategy.KEEP_ONE_PER_BACKUP) .build(); - new BackupController(configuration, false).execute(1); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(false) + .build(); + new BackupController(parameters).execute(1); } } diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerIntegrationTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerIntegrationTest.java index 2c09c97..946d917 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerIntegrationTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerIntegrationTest.java @@ -7,6 +7,7 @@ import com.github.nagyesta.filebarj.core.config.RestoreTask; import com.github.nagyesta.filebarj.core.model.BackupPath; import com.github.nagyesta.filebarj.core.restore.pipeline.RestoreController; +import com.github.nagyesta.filebarj.core.restore.pipeline.RestoreParameters; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.DisabledOnOs; @@ -84,16 +85,34 @@ public Stream validRangeProvider() { .build(); } + @SuppressWarnings("DataFlowIssue") + @Test + void testConstructorShouldThrowExceptionWhenCalledWithNull() { + //given + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> new MergeController(null)); + + //then + exception + } + @Test void testConstructorShouldThrowExceptionWhenCalledWithInvalidStartTime() throws IOException { //given final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); 
prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_BACKUP) + .kek(null) + .rangeStartEpochSeconds(0L) + .rangeEndEpochSeconds(B_INCREMENT_1) + .build(); //when Assertions.assertThrows(IllegalArgumentException.class, - () -> new MergeController(backupPath, UBUNTU_BACKUP, null, 0L, B_INCREMENT_1)); + () -> new MergeController(parameters)); //then + exception } @@ -104,10 +123,17 @@ void testConstructorShouldThrowExceptionWhenCalledWithInvalidEndTime() throws IO final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_BACKUP) + .kek(null) + .rangeStartEpochSeconds(B_FIRST_FULL) + .rangeEndEpochSeconds(B_INCREMENT_1 + 1L) + .build(); //when Assertions.assertThrows(IllegalArgumentException.class, - () -> new MergeController(backupPath, UBUNTU_BACKUP, null, B_FIRST_FULL, B_INCREMENT_1 + 1L)); + () -> new MergeController(parameters)); //then + exception } @@ -119,9 +145,16 @@ void testConstructorShouldNotThrowExceptionWhenCalledWithValidRange(final long s final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_BACKUP) + .kek(null) + .rangeStartEpochSeconds(start) + .rangeEndEpochSeconds(end) + .build(); //when - final var actual = new MergeController(backupPath, UBUNTU_BACKUP, null, start, end); + final var actual = new MergeController(parameters); //then Assertions.assertNotNull(actual); @@ -134,7 +167,14 @@ void testExecuteShouldMergeSelectedRangeWhenCalledWithFullBackupAndFirstIncremen final var backupPath = testDataRoot.resolve("backup"); 
Files.createDirectories(backupPath); prepareBackupFiles(BOTH_SETS_UBUNTU_BACKUP, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_FIRST_FULL, B_INCREMENT_1); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_BACKUP) + .kek(null) + .rangeStartEpochSeconds(B_FIRST_FULL) + .rangeEndEpochSeconds(B_INCREMENT_1) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(true); @@ -159,7 +199,14 @@ void testExecuteShouldMergeSelectedRangeWhenCalledWithFullBackupAndFirstIncremen final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(BOTH_SETS_UBUNTU_ENCRYPTED, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_FIRST_FULL, E_INCREMENT_1); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_ENCRYPTED) + .kek(KEK) + .rangeStartEpochSeconds(E_FIRST_FULL) + .rangeEndEpochSeconds(E_INCREMENT_1) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(true); @@ -184,7 +231,14 @@ void testExecuteShouldMergeSelectedRangeWhenCalledWithAllIncrementsWithoutEncryp final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_INCREMENT_1, B_INCREMENT_3); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_BACKUP) + .kek(null) + .rangeStartEpochSeconds(B_INCREMENT_1) + .rangeEndEpochSeconds(B_INCREMENT_3) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(true); @@ -211,7 +265,14 @@ void 
testExecuteShouldMergeSelectedRangeWhenCalledWithAllIncrementsWithEncryptio final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(FIRST_SET_UBUNTU_ENCRYPTED, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_INCREMENT_1, E_INCREMENT_3); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_ENCRYPTED) + .kek(KEK) + .rangeStartEpochSeconds(E_INCREMENT_1) + .rangeEndEpochSeconds(E_INCREMENT_3) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(true); @@ -238,7 +299,14 @@ void testExecuteShouldMergeSelectedRangeWhenCalledWithFirstTwoIncrementsWithoutE final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_INCREMENT_1, B_INCREMENT_2); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_BACKUP) + .kek(null) + .rangeStartEpochSeconds(B_INCREMENT_1) + .rangeEndEpochSeconds(B_INCREMENT_2) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(true); @@ -265,7 +333,14 @@ void testExecuteShouldMergeSelectedRangeWhenCalledWithFirstTwoIncrementsWithEncr final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(FIRST_SET_UBUNTU_ENCRYPTED, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_INCREMENT_1, E_INCREMENT_2); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_ENCRYPTED) + .kek(KEK) + .rangeStartEpochSeconds(E_INCREMENT_1) + .rangeEndEpochSeconds(E_INCREMENT_2) + .build(); + final var underTest = new MergeController(parameters); 
//when final var actual = underTest.execute(true); @@ -296,7 +371,14 @@ void testExecuteShouldMergeSelectedRangeWhenCalledWithFullBackupAndTwoIncrements UB_INCREMENT_1, UB_INCREMENT_2); prepareBackupFiles(prefixes, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_FIRST_FULL, B_INCREMENT_2); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_BACKUP) + .kek(null) + .rangeStartEpochSeconds(B_FIRST_FULL) + .rangeEndEpochSeconds(B_INCREMENT_2) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(true); @@ -327,7 +409,14 @@ void testExecuteShouldMergeSelectedRangeWhenCalledWithFullBackupAndTwoIncrements UE_INCREMENT_1, UE_INCREMENT_2); prepareBackupFiles(prefixes, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_FIRST_FULL, E_INCREMENT_2); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_ENCRYPTED) + .kek(KEK) + .rangeStartEpochSeconds(E_FIRST_FULL) + .rangeEndEpochSeconds(E_INCREMENT_2) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(true); @@ -355,7 +444,14 @@ void testExecuteShouldDeleteIncrementsFromSelectedRangeBeforeFullBackupWhenCalle final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(BOTH_SETS_UBUNTU_BACKUP, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_INCREMENT_1, B_SECOND_FULL); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_BACKUP) + .kek(null) + .rangeStartEpochSeconds(B_INCREMENT_1) + .rangeEndEpochSeconds(B_SECOND_FULL) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(true); @@ -383,7 +479,14 @@ void 
testExecuteShouldDeleteIncrementsFromSelectedRangeBeforeFullBackupWhenCalle final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(BOTH_SETS_UBUNTU_ENCRYPTED, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_INCREMENT_1, E_SECOND_FULL); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_ENCRYPTED) + .kek(KEK) + .rangeStartEpochSeconds(E_INCREMENT_1) + .rangeEndEpochSeconds(E_SECOND_FULL) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(true); @@ -410,7 +513,14 @@ void testExecuteShouldNotDeleteFilesWhenCalledWithFalseFlagWithoutEncryption() t final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_INCREMENT_1, B_INCREMENT_2); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_BACKUP) + .kek(null) + .rangeStartEpochSeconds(B_INCREMENT_1) + .rangeEndEpochSeconds(B_INCREMENT_2) + .build(); + final var underTest = new MergeController(parameters); //when final var actual = underTest.execute(false); @@ -427,7 +537,14 @@ void testExecuteShouldNotDeleteFilesWhenCalledWithFalseFlagWithEncryption() thro final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); prepareBackupFiles(FIRST_SET_UBUNTU_ENCRYPTED, backupPath); - final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_INCREMENT_1, E_INCREMENT_2); + final var parameters = MergeParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(UBUNTU_ENCRYPTED) + .kek(KEK) + .rangeStartEpochSeconds(E_INCREMENT_1) + .rangeEndEpochSeconds(E_INCREMENT_2) + .build(); + final var underTest = new MergeController(parameters); 
//when final var actual = underTest.execute(false); @@ -467,8 +584,12 @@ private void restoreBackups( .threads(1) .permissionComparisonStrategy(PermissionComparisonStrategy.RELAXED) .build(); - new RestoreController(backupPath, fileNamePrefix, kek) - .execute(task); + final var parameters = RestoreParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix(fileNamePrefix) + .kek(kek) + .build(); + new RestoreController(parameters).execute(task); verifyContents(restoredR, rContents); verifyContents(restoredU, uContents); } diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerTest.java deleted file mode 100644 index 251e592..0000000 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerTest.java +++ /dev/null @@ -1,54 +0,0 @@ -package com.github.nagyesta.filebarj.core.merge; - -import com.github.nagyesta.filebarj.core.TempFileAwareTest; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class MergeControllerTest extends TempFileAwareTest { - - @SuppressWarnings("DataFlowIssue") - @Test - void testConstructorShouldThrowExceptionWhenCalledWithNullDirectory() { - //given - - //when - Assertions.assertThrows(IllegalArgumentException.class, - () -> new MergeController(null, "prefix", null, 0L, 1L)); - - //then + exception - } - - @SuppressWarnings("DataFlowIssue") - @Test - void testConstructorShouldThrowExceptionWhenCalledWithNullPrefix() { - //given - - //when - Assertions.assertThrows(IllegalArgumentException.class, - () -> new MergeController(testDataRoot, null, null, 0L, 1L)); - - //then + exception - } - - @Test - void testConstructorShouldThrowExceptionWhenCalledWithEndTimeLaterThanStartTime() { - //given - - //when - Assertions.assertThrows(IllegalArgumentException.class, - () -> new MergeController(testDataRoot, "prefix", null, 0L, -1L)); - - 
//then + exception - } - - @Test - void testConstructorShouldThrowExceptionWhenCalledWithStartTimeEqualToEndTime() { - //given - - //when - Assertions.assertThrows(IllegalArgumentException.class, - () -> new MergeController(testDataRoot, "prefix", null, 0L, 0L)); - - //then + exception - } -} diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeParametersTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeParametersTest.java new file mode 100644 index 0000000..091103a --- /dev/null +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeParametersTest.java @@ -0,0 +1,64 @@ +package com.github.nagyesta.filebarj.core.merge; + +import com.github.nagyesta.filebarj.core.TempFileAwareTest; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class MergeParametersTest extends TempFileAwareTest { + + @SuppressWarnings("DataFlowIssue") + @Test + void testBuilderShouldThrowExceptionWhenCalledWithNullDirectory() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> MergeParameters.builder().backupDirectory(null)); + + //then + exception + } + + @SuppressWarnings("DataFlowIssue") + @Test + void testBuilderShouldThrowExceptionWhenCalledWithNullPrefix() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> MergeParameters.builder().fileNamePrefix(null)); + + //then + exception + } + + @Test + void testAssertValidShouldThrowExceptionWhenCalledWithEndTimeLaterThanStartTime() { + //given + final var underTest = MergeParameters.builder() + .backupDirectory(testDataRoot) + .fileNamePrefix("prefix") + .rangeStartEpochSeconds(0L) + .rangeEndEpochSeconds(-1L) + .build(); + + //when + Assertions.assertThrows(IllegalArgumentException.class, underTest::assertValid); + + //then + exception + } + + @Test + void testAssertValidShouldThrowExceptionWhenCalledWithStartTimeEqualToEndTime() { + //given + 
final var underTest = MergeParameters.builder() + .backupDirectory(testDataRoot) + .fileNamePrefix("prefix") + .rangeStartEpochSeconds(0L) + .rangeEndEpochSeconds(0L) + .build(); + + //when + Assertions.assertThrows(IllegalArgumentException.class, underTest::assertValid); + + //then + exception + } +} diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/model/BackupPathTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/model/BackupPathTest.java index ce66703..a43e497 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/model/BackupPathTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/model/BackupPathTest.java @@ -111,7 +111,7 @@ void testOfWithPathShouldThrowExceptionWhenCalledWithNull() { //given //when - Assertions.assertThrows(IllegalArgumentException.class, () -> BackupPath.of((Path) null)); + Assertions.assertThrows(IllegalArgumentException.class, () -> BackupPath.of(null)); //then + exception } @@ -135,7 +135,7 @@ void testOfWithOneStringShouldThrowExceptionWhenCalledWithNull() { //given //when - Assertions.assertThrows(IllegalArgumentException.class, () -> BackupPath.ofPathAsIs((String) null)); + Assertions.assertThrows(IllegalArgumentException.class, () -> BackupPath.ofPathAsIs(null)); //then + exception } diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/progress/ObservableProgressTrackerTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/progress/ObservableProgressTrackerTest.java new file mode 100644 index 0000000..b65faff --- /dev/null +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/progress/ObservableProgressTrackerTest.java @@ -0,0 +1,235 @@ +package com.github.nagyesta.filebarj.core.progress; + +import com.github.nagyesta.filebarj.core.TempFileAwareTest; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.math.BigDecimal; +import 
java.math.RoundingMode; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static com.github.nagyesta.filebarj.core.progress.ProgressStep.*; +import static org.mockito.Mockito.*; + +class ObservableProgressTrackerTest extends TempFileAwareTest { + + private static final long SUB_STEPS_SCAN = 200L; + private static final long SUB_STEPS_PARSE = 6000L; + private static final long SUB_STEPS_BACKUP = 500000L; + private static final int HUNDRED_PERCENT = 100; + private static final int SCALE = 10; + private static final BigDecimal TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS = BigDecimal.valueOf(HUNDRED_PERCENT) + .divide(BigDecimal.valueOf(3), SCALE, RoundingMode.HALF_UP); + private static final int PARSE_DEFAULT_STEPS = 10; + private static final int BACKUP_DEFAULT_STEPS = 5; + + @Test + void testCompleteStepShouldReportHundredPercentAsSubProgressWhenCalled() { + //given + final var underTest = createUnderTest(Map.of()); + final var listener = registerListener(underTest); + + //when + underTest.completeStep(SCAN_FILES); + underTest.completeStep(PARSE_METADATA); + underTest.completeStep(BACKUP); + + //then + final var inOrder = inOrder(listener); + inOrder.verify(listener).getId(); + var totalProgressCounter = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS; + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), HUNDRED_PERCENT, SCAN_FILES.getDisplayName()); + totalProgressCounter = totalProgressCounter.add(TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), HUNDRED_PERCENT, PARSE_METADATA.getDisplayName()); + totalProgressCounter = totalProgressCounter.add(TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), HUNDRED_PERCENT, BACKUP.getDisplayName()); + } + + @Test + void 
testRecordProgressInSubStepsShouldReportAtRegularFrequencyWhenSmallStepsAreMade() { + //given + final var underTest = createUnderTest(Map.of()); + final var listener = registerListener(underTest); + final var scanStepping = SUB_STEPS_SCAN / 100L; + final var parseStepping = SUB_STEPS_PARSE / 200L; + final var backupStepping = SUB_STEPS_BACKUP / 200L; + + //when + for (var i = 0L; i < SUB_STEPS_SCAN; i += scanStepping) { + underTest.recordProgressInSubSteps(SCAN_FILES); + } + underTest.completeStep(SCAN_FILES); + for (var i = 0L; i < SUB_STEPS_PARSE; i += parseStepping) { + underTest.recordProgressInSubSteps(PARSE_METADATA, parseStepping); + } + underTest.completeStep(PARSE_METADATA); + for (var i = 0L; i < SUB_STEPS_BACKUP; i += backupStepping) { + underTest.recordProgressInSubSteps(BACKUP, backupStepping); + } + + //then + final var inOrder = inOrder(listener); + inOrder.verify(listener).getId(); + var totalProgressCounter = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS; + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), HUNDRED_PERCENT, SCAN_FILES.getDisplayName()); + final var parseTotalProcessStep = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS + .divide(BigDecimal.TEN, SCALE, RoundingMode.HALF_UP); + for (var subProgress = PARSE_DEFAULT_STEPS; subProgress <= HUNDRED_PERCENT; subProgress += PARSE_DEFAULT_STEPS) { + totalProgressCounter = totalProgressCounter.add(parseTotalProcessStep); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), subProgress, PARSE_METADATA.getDisplayName()); + } + final var backupTotalProcessStep = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS + .divide(BigDecimal.valueOf(20), SCALE, RoundingMode.HALF_UP); + for (var subProgress = BACKUP_DEFAULT_STEPS; subProgress <= HUNDRED_PERCENT; subProgress += BACKUP_DEFAULT_STEPS) { + totalProgressCounter = totalProgressCounter.add(backupTotalProcessStep); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), subProgress, BACKUP.getDisplayName()); + } + } + 
+ @Test + void testRecordProgressInSubStepsShouldReportAtRegularFrequencyWhenSmallStepsAreMadeWithUnequalWeights() { + //given + final var underTest = createUnderTest(Map.of(PARSE_METADATA, 2, BACKUP, 3)); + final var listener = registerListener(underTest); + final var scanStepping = SUB_STEPS_SCAN / 100L; + final var parseStepping = SUB_STEPS_PARSE / 200L; + final var backupStepping = SUB_STEPS_BACKUP / 200L; + + //when + for (var i = 0L; i < SUB_STEPS_SCAN; i += scanStepping) { + underTest.recordProgressInSubSteps(SCAN_FILES); + } + underTest.completeStep(SCAN_FILES); + for (var i = 0L; i < SUB_STEPS_PARSE; i += parseStepping) { + underTest.recordProgressInSubSteps(PARSE_METADATA, parseStepping); + } + underTest.completeStep(PARSE_METADATA); + for (var i = 0L; i < SUB_STEPS_BACKUP; i += backupStepping) { + underTest.recordProgressInSubSteps(BACKUP, backupStepping); + } + + //then + final var inOrder = inOrder(listener); + inOrder.verify(listener).getId(); + var totalProgressCounter = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS + .divide(BigDecimal.valueOf(2), SCALE, RoundingMode.HALF_UP); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), HUNDRED_PERCENT, SCAN_FILES.getDisplayName()); + final var parseTotalProcessStep = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS + .divide(BigDecimal.valueOf(HUNDRED_PERCENT / PARSE_DEFAULT_STEPS), SCALE, RoundingMode.HALF_UP); + for (var subProgress = PARSE_DEFAULT_STEPS; subProgress <= HUNDRED_PERCENT; subProgress += PARSE_DEFAULT_STEPS) { + totalProgressCounter = totalProgressCounter.add(parseTotalProcessStep); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), subProgress, PARSE_METADATA.getDisplayName()); + } + final var backupTotalProcessStep = BigDecimal.valueOf(HUNDRED_PERCENT / 2) + .divide(BigDecimal.valueOf(HUNDRED_PERCENT / BACKUP_DEFAULT_STEPS), SCALE, RoundingMode.HALF_UP); + for (var subProgress = BACKUP_DEFAULT_STEPS; subProgress <= HUNDRED_PERCENT; subProgress += 
BACKUP_DEFAULT_STEPS) { + totalProgressCounter = totalProgressCounter.add(backupTotalProcessStep); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), subProgress, BACKUP.getDisplayName()); + } + } + + @Test + void testRecordProgressInSubStepsShouldReportWhenABigStepIsTakenLargerThanFrequency() { + //given + final var underTest = createUnderTest(Map.of()); + final var listener = registerListener(underTest); + final var scanStepping = SUB_STEPS_SCAN / 2L; + final var parseStepping = SUB_STEPS_PARSE / 6L; + final var backupStepping = SUB_STEPS_BACKUP / 4L; + + //when + for (var i = 0L; i < SUB_STEPS_SCAN; i += scanStepping) { + underTest.recordProgressInSubSteps(SCAN_FILES); + } + underTest.completeStep(SCAN_FILES); + for (var i = 0L; i < SUB_STEPS_PARSE; i += parseStepping) { + underTest.recordProgressInSubSteps(PARSE_METADATA, parseStepping); + } + underTest.completeStep(PARSE_METADATA); + for (var i = 0L; i < SUB_STEPS_BACKUP; i += backupStepping) { + underTest.recordProgressInSubSteps(BACKUP, backupStepping); + } + + //then + final var inOrder = inOrder(listener); + inOrder.verify(listener).getId(); + var totalProgressCounter = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS; + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), HUNDRED_PERCENT, SCAN_FILES.getDisplayName()); + final var parseTotalProcessStep = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS + .divide(BigDecimal.valueOf(6), SCALE, RoundingMode.HALF_UP); + final var parseSubProcStep = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS + .divide(BigDecimal.valueOf(2), SCALE, RoundingMode.HALF_UP); + for (var subProgress = parseSubProcStep; toInt(subProgress) <= HUNDRED_PERCENT; subProgress = subProgress.add(parseSubProcStep)) { + totalProgressCounter = totalProgressCounter.add(parseTotalProcessStep); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), toInt(subProgress), PARSE_METADATA.getDisplayName()); + } + final var backupTotalProcessStep = 
TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS + .divide(BigDecimal.valueOf(4), SCALE, RoundingMode.HALF_UP); + final var backupPercentageSteps = 25; + for (var subProgress = backupPercentageSteps; subProgress <= HUNDRED_PERCENT; subProgress += backupPercentageSteps) { + totalProgressCounter = totalProgressCounter.add(backupTotalProcessStep); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), subProgress, BACKUP.getDisplayName()); + } + } + + @Test + void testCompleteAllShouldCompleteAllStepsOneByOneWhenCalled() { + //given + final var underTest = createUnderTest(Map.of()); + final var listener = registerListener(underTest); + + //when + underTest.completeAll(); + + //then + final var inOrder = inOrder(listener); + inOrder.verify(listener).getId(); + var totalProgressCounter = TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS; + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), HUNDRED_PERCENT, SCAN_FILES.getDisplayName()); + totalProgressCounter = totalProgressCounter.add(TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), HUNDRED_PERCENT, PARSE_METADATA.getDisplayName()); + totalProgressCounter = totalProgressCounter.add(TOTAL_PROGRESS_PER_STEP_EQUAL_WEIGHTS); + inOrder.verify(listener).onProgressChanged(toInt(totalProgressCounter), HUNDRED_PERCENT, BACKUP.getDisplayName()); + } + + @SuppressWarnings("DataFlowIssue") + @Test + void testRegisterListenerShouldThrowExceptionWhenCalledWithNull() { + //given + final var underTest = createUnderTest(Map.of()); + + //when + Assertions.assertThrows(IllegalArgumentException.class, () -> underTest.registerListener(null)); + underTest.completeAll(); + + //then + exception + } + + private static int toInt(final BigDecimal totalProgressCounter) { + return totalProgressCounter + .setScale(1, RoundingMode.HALF_UP) + .setScale(0, RoundingMode.HALF_UP) + .intValue(); + } + + private static ProgressListener registerListener(final 
ObservableProgressTracker underTest) { + final var listener = mock(ProgressListener.class); + when(listener.getId()).thenReturn(UUID.randomUUID()); + underTest.registerListener(listener); + return listener; + } + + private static ObservableProgressTracker createUnderTest(final Map<ProgressStep, Integer> weights) { + final var steps = List.of(SCAN_FILES, PARSE_METADATA, BACKUP); + final var underTest = new ObservableProgressTracker(steps, steps.stream() + .collect(Collectors.toMap(Function.identity(), step -> weights.getOrDefault(step, 1)))); + underTest.estimateStepSubtotal(SCAN_FILES, SUB_STEPS_SCAN); + underTest.estimateStepSubtotal(PARSE_METADATA, SUB_STEPS_PARSE); + underTest.estimateStepSubtotal(BACKUP, SUB_STEPS_BACKUP); + return underTest; + } +} diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/CrossPlatformRestoreIntegrationTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/CrossPlatformRestoreIntegrationTest.java index 9bac3bd..c62a5fe 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/CrossPlatformRestoreIntegrationTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/CrossPlatformRestoreIntegrationTest.java @@ -34,10 +34,15 @@ void testRestoreShouldRestoreContentWhenRestoringABackupMadeOnWindows() throws I .threads(1) .permissionComparisonStrategy(PermissionComparisonStrategy.RELAXED) .build(); + final var parameters = RestoreParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix("windows-backup") + .kek(null) + .atPointInTime(Long.MAX_VALUE) + .build(); //when - new RestoreController(backupPath, "windows-backup", null) - .execute(task); + new RestoreController(parameters).execute(task); //then verifyContent(restoredR, restoredU); @@ -61,10 +66,15 @@ void testRestoreShouldRestoreContentWhenRestoringABackupMadeOnUnix() throws IOEx .threads(1) 
.permissionComparisonStrategy(PermissionComparisonStrategy.RELAXED) .build(); + final var parameters = RestoreParameters.builder() + .backupDirectory(backupPath) + .fileNamePrefix("ubuntu-backup") + .kek(null) + .atPointInTime(Long.MAX_VALUE) + .build(); //when - new RestoreController(backupPath, "ubuntu-backup", null) - .execute(task); + new RestoreController(parameters).execute(task); //then verifyContent(restoredR, restoredU); diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreControllerIntegrationTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreControllerIntegrationTest.java index 1e0680c..bd0043c 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreControllerIntegrationTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreControllerIntegrationTest.java @@ -3,6 +3,7 @@ import com.github.nagyesta.filebarj.core.TempFileAwareTest; import com.github.nagyesta.filebarj.core.backup.ArchivalException; import com.github.nagyesta.filebarj.core.backup.pipeline.BackupController; +import com.github.nagyesta.filebarj.core.backup.pipeline.BackupParameters; import com.github.nagyesta.filebarj.core.backup.worker.FileMetadataParser; import com.github.nagyesta.filebarj.core.backup.worker.FileMetadataParserFactory; import com.github.nagyesta.filebarj.core.config.*; @@ -55,10 +56,15 @@ void testConstructorShouldThrowExceptionWhenCalledWithAPathWithoutBackups() { final var source = testDataRoot.resolve("source-dir" + UUID.randomUUID()); final var backup = testDataRoot.resolve("backup-dir" + UUID.randomUUID()); final var configuration = getBackupJobConfiguration(BackupType.FULL, source, backup, null, HashAlgorithm.SHA256); + final var parameters = RestoreParameters.builder() + .backupDirectory(configuration.getDestinationDirectory()) + .fileNamePrefix(configuration.getFileNamePrefix()) + 
.kek(null) + .atPointInTime(Long.MAX_VALUE) + .build(); //when - Assertions.assertThrows(ArchivalException.class, () -> new RestoreController( - configuration.getDestinationDirectory(), configuration.getFileNamePrefix(), null)); + Assertions.assertThrows(ArchivalException.class, () -> new RestoreController(parameters)); //then + exception } @@ -71,10 +77,19 @@ void testExecuteShouldThrowExceptionWhenCalledWithNull() throws IOException { final var backup = testDataRoot.resolve("backup-dir" + UUID.randomUUID()); final var configuration = getBackupJobConfiguration(BackupType.FULL, source, backup, null, HashAlgorithm.SHA256); FileUtils.copyFile(getExampleResource(), source.resolve("A.png").toFile()); - final var backupController = new BackupController(configuration, true); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); - final var underTest = new RestoreController( - configuration.getDestinationDirectory(), configuration.getFileNamePrefix(), null); + final var restoreParameters = RestoreParameters.builder() + .backupDirectory(configuration.getDestinationDirectory()) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(null) + .atPointInTime(Long.MAX_VALUE) + .build(); + final var underTest = new RestoreController(restoreParameters); //when Assertions.assertThrows(IllegalArgumentException.class, () -> underTest.execute(null)); @@ -90,11 +105,20 @@ void testExecuteShouldThrowExceptionWhenCalledWithLessThanOneThreads() throws IO final var restore = testDataRoot.resolve("restore-dir" + UUID.randomUUID()); final var configuration = getBackupJobConfiguration(BackupType.FULL, source, backup, null, HashAlgorithm.SHA256); FileUtils.copyFile(getExampleResource(), source.resolve("A.png").toFile()); - final var backupController = new BackupController(configuration, true); + final var parameters = BackupParameters.builder() + 
.job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); - final var underTest = new RestoreController( - configuration.getDestinationDirectory(), configuration.getFileNamePrefix(), null); + final var restoreParameters = RestoreParameters.builder() + .backupDirectory(configuration.getDestinationDirectory()) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(null) + .atPointInTime(Long.MAX_VALUE) + .build(); + final var underTest = new RestoreController(restoreParameters); final var restoreTargets = new RestoreTargets(Set.of(new RestoreTarget(BackupPath.of(backup), restore))); //when @@ -137,13 +161,22 @@ void testExecuteShouldRestoreFilesToDestinationWhenExecutedWithValidInput( final var externalLinkTarget = getExampleResource().toPath().toAbsolutePath(); Files.createSymbolicLink(sourceLinkExternal, externalLinkTarget); - final var backupController = new BackupController(configuration, true); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); Files.move(backupDir, movedBackupDir); - final var underTest = new RestoreController( - movedBackupDir, configuration.getFileNamePrefix(), decryptionKey); + final var restoreParameters = RestoreParameters.builder() + .backupDirectory(movedBackupDir) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(decryptionKey) + .atPointInTime(Long.MAX_VALUE) + .build(); + final var underTest = new RestoreController(restoreParameters); final var restoreTargets = new RestoreTargets(Set.of(new RestoreTarget(BackupPath.of(sourceDir), restoreDir))); //when @@ -195,13 +228,22 @@ void testExecuteShouldRestoreOnlyIncludedFilesToDestinationWhenExecutedWithInclu final var externalLinkTarget = getExampleResource().toPath().toAbsolutePath(); Files.createSymbolicLink(sourceLinkExternal, 
externalLinkTarget); - final var backupController = new BackupController(configuration, true); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); Files.move(backupDir, movedBackupDir); - final var underTest = new RestoreController( - movedBackupDir, configuration.getFileNamePrefix(), decryptionKey); + final var restoreParameters = RestoreParameters.builder() + .backupDirectory(movedBackupDir) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(decryptionKey) + .atPointInTime(Long.MAX_VALUE) + .build(); + final var underTest = new RestoreController(restoreParameters); final var restoreTargets = new RestoreTargets(Set.of(new RestoreTarget(BackupPath.of(sourceDir), restoreDir))); final var realRestorePath = restoreTargets.mapToRestorePath(BackupPath.of(sourceDir)); final var restoredAPng = realRestorePath.resolve(aPng.getFileName().toString()); @@ -275,11 +317,20 @@ void testExecuteShouldRestoreFilesToDestinationWhenTargetFilesAlreadyExistWithDi final var externalLinkTarget = getExampleResource().toPath().toAbsolutePath(); Files.createSymbolicLink(sourceLinkExternal, externalLinkTarget); - final var backupController = new BackupController(configuration, true); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); - final var underTest = new RestoreController( - backupDir, configuration.getFileNamePrefix(), decryptionKey); + final var restoreParameters = RestoreParameters.builder() + .backupDirectory(backupDir) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(decryptionKey) + .atPointInTime(Long.MAX_VALUE) + .build(); + final var underTest = new RestoreController(restoreParameters); final var restoreTargets = new RestoreTargets(Set.of(new 
RestoreTarget(BackupPath.of(sourceDir), restoreDir))); final var realRestorePath = restoreTargets.mapToRestorePath(BackupPath.of(sourceDir)); Files.createDirectories(realRestorePath); @@ -339,11 +390,20 @@ void testExecuteShouldOnlySimulateRestoreWhenTargetFilesAlreadyExistWithDifferen final var externalLinkTarget = getExampleResource().toPath().toAbsolutePath(); Files.createSymbolicLink(sourceLinkExternal, externalLinkTarget); - final var backupController = new BackupController(configuration, true); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); - final var underTest = new RestoreController( - backupDir, configuration.getFileNamePrefix(), decryptionKey); + final var restoreParameters = RestoreParameters.builder() + .backupDirectory(backupDir) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(decryptionKey) + .atPointInTime(Long.MAX_VALUE) + .build(); + final var underTest = new RestoreController(restoreParameters); final var restoreTargets = new RestoreTargets(Set.of(new RestoreTarget(BackupPath.of(sourceDir), restoreDir))); final var realRestorePath = restoreTargets.mapToRestorePath(BackupPath.of(sourceDir)); Files.createDirectories(realRestorePath); @@ -404,11 +464,20 @@ void testExecuteShouldRestoreFilesToDestinationWhenTargetFilesAlreadyExistWithPa final var externalLinkTarget = getExampleResource().toPath().toAbsolutePath(); Files.createSymbolicLink(sourceLinkExternal, externalLinkTarget); - final var backupController = new BackupController(configuration, true); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); - final var underTest = new RestoreController( - backupDir, configuration.getFileNamePrefix(), decryptionKey); + final var restoreParameters = 
RestoreParameters.builder() + .backupDirectory(backupDir) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(decryptionKey) + .atPointInTime(Long.MAX_VALUE) + .build(); + final var underTest = new RestoreController(restoreParameters); final var restoreTargets = new RestoreTargets(Set.of(new RestoreTarget(BackupPath.of(sourceDir), restoreDir))); final var realRestorePath = restoreTargets.mapToRestorePath(BackupPath.of(sourceDir)); Files.createDirectories(realRestorePath); @@ -475,7 +544,11 @@ void testExecuteShouldRestoreFilesToDestinationWhenExecutedWithIncrementalBackup final var externalLinkTarget = getExampleResource().toPath().toAbsolutePath(); Files.createSymbolicLink(originalLinkExternal, externalLinkTarget); - new BackupController(configuration, true).execute(1); + final var parameters1 = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + new BackupController(parameters1).execute(1); Files.delete(deleted); final var expectedChangedContent = "changed content"; @@ -492,10 +565,19 @@ void testExecuteShouldRestoreFilesToDestinationWhenExecutedWithIncrementalBackup final var fullBackupTime = Instant.now().getEpochSecond(); Thread.sleep(A_SECOND); - new BackupController(configuration, false).execute(1); + final var parameters2 = BackupParameters.builder() + .job(configuration) + .forceFull(false) + .build(); + new BackupController(parameters2).execute(1); //create restore controller to read full backup increment - final var restoreFullBackup = new RestoreController( - backupDir, configuration.getFileNamePrefix(), decryptionKey, fullBackupTime); + final var restoreParameters1 = RestoreParameters.builder() + .backupDirectory(backupDir) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(decryptionKey) + .atPointInTime(fullBackupTime) + .build(); + final var restoreFullBackup = new RestoreController(restoreParameters1); final var restoreTargets = new RestoreTargets(Set.of(new RestoreTarget(BackupPath.of(sourceDir), 
restoreDir))); final var restoreTask = RestoreTask.builder() .restoreTargets(restoreTargets) @@ -509,8 +591,13 @@ void testExecuteShouldRestoreFilesToDestinationWhenExecutedWithIncrementalBackup Assertions.assertTrue(Files.exists(realRestorePath.resolve("folder/deleted.png"))); //recreate restore controller to read new backup increment - final var underTest = new RestoreController( - backupDir, configuration.getFileNamePrefix(), decryptionKey); + final var restoreParameters2 = RestoreParameters.builder() + .backupDirectory(backupDir) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(decryptionKey) + .atPointInTime(Long.MAX_VALUE) + .build(); + final var underTest = new RestoreController(restoreParameters2); //when underTest.execute(restoreTask); @@ -564,11 +651,20 @@ void testExecuteShouldNotRestoreAnyFilesWhenExecutedWithValidInputUsingDryRun( final var externalLinkTarget = getExampleResource().toPath().toAbsolutePath(); Files.createSymbolicLink(sourceLinkExternal, externalLinkTarget); - final var backupController = new BackupController(configuration, true); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); - final var underTest = new RestoreController( - backupDir, configuration.getFileNamePrefix(), decryptionKey); + final var restoreParameters = RestoreParameters.builder() + .backupDirectory(backupDir) + .fileNamePrefix(configuration.getFileNamePrefix()) + .kek(decryptionKey) + .atPointInTime(Long.MAX_VALUE) + .build(); + final var underTest = new RestoreController(restoreParameters); final var restoreTargets = new RestoreTargets(Set.of(new RestoreTarget(BackupPath.of(sourceDir), restoreDir))); //when diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreControllerTest.java 
b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreControllerTest.java index d9fd6d3..b1062a3 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreControllerTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestoreControllerTest.java @@ -3,30 +3,40 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import java.nio.file.Path; - class RestoreControllerTest { @SuppressWarnings("DataFlowIssue") @Test - void testConstructorShouldThrowExceptionWhenCalledWithNullDirectory() { + void testBuilderShouldThrowExceptionWhenCalledWithNullDirectory() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> RestoreParameters.builder().backupDirectory(null)); + + //then + exception + } + + @SuppressWarnings("DataFlowIssue") + @Test + void testBuilderShouldThrowExceptionWhenCalledWithNullPrefix() { //given //when Assertions.assertThrows(IllegalArgumentException.class, - () -> new RestoreController(null, "prefix", null)); + () -> RestoreParameters.builder().fileNamePrefix(null)); //then + exception } @SuppressWarnings("DataFlowIssue") @Test - void testConstructorShouldThrowExceptionWhenCalledWithNullPrefix() { + void testConstructorShouldThrowExceptionWhenCalledWithNull() { //given //when Assertions.assertThrows(IllegalArgumentException.class, - () -> new RestoreController(Path.of("dir"), null, null)); + () -> new RestoreController(null)); //then + exception } diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestorePipelineIntegrationTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestorePipelineIntegrationTest.java index 3971a9c..a070379 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestorePipelineIntegrationTest.java +++ 
b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/RestorePipelineIntegrationTest.java @@ -2,6 +2,7 @@ import com.github.nagyesta.filebarj.core.TempFileAwareTest; import com.github.nagyesta.filebarj.core.backup.pipeline.BackupController; +import com.github.nagyesta.filebarj.core.backup.pipeline.BackupParameters; import com.github.nagyesta.filebarj.core.common.ManifestManagerImpl; import com.github.nagyesta.filebarj.core.common.PermissionComparisonStrategy; import com.github.nagyesta.filebarj.core.config.BackupJobConfiguration; @@ -15,6 +16,7 @@ import com.github.nagyesta.filebarj.core.model.BackupPath; import com.github.nagyesta.filebarj.core.model.RestoreManifest; import com.github.nagyesta.filebarj.core.model.enums.BackupType; +import com.github.nagyesta.filebarj.core.progress.NoOpProgressTracker; import org.apache.commons.io.FileUtils; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -117,7 +119,8 @@ void testConstructorShouldThrowExceptionWhenCalledWithNullSourcePath() throws IO //given final var backupController = executeABackup(); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -136,7 +139,8 @@ void testConstructorShouldThrowExceptionWhenCalledWithNullTargetPath() throws IO //given final var backupController = executeABackup(); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new 
ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var backupDirectory = testDataRoot.resolve("backup-dir"); //when @@ -155,7 +159,8 @@ void testFinalizePermissionsShouldThrowExceptionWhenCalledWithNullFiles() throws final var backupDirectory = testDataRoot.resolve("backup-dir"); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -177,7 +182,8 @@ void testFinalizePermissionsShouldThrowExceptionWhenCalledWithNullMap() throws I final var backupDirectory = testDataRoot.resolve("backup-dir"); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -199,7 +205,8 @@ void testRestoreFilesShouldThrowExceptionWhenCalledWithNullContentSources() thro final var backupDirectory = testDataRoot.resolve("backup-dir"); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new 
ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -221,7 +228,8 @@ void testRestoreFilesShouldThrowExceptionWhenCalledWithNullThreadPool() throws I final var backupDirectory = testDataRoot.resolve("backup-dir"); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -246,7 +254,8 @@ void testEvaluateRestoreSuccessShouldThrowExceptionWhenCalledWithNullFiles() thr final var backupDirectory = testDataRoot.resolve("backup-dir"); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -268,7 +277,8 @@ void testEvaluateRestoreSuccessShouldThrowExceptionWhenCalledWithNullThreadPool( final var backupDirectory = testDataRoot.resolve("backup-dir"); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, 
manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -289,7 +299,8 @@ void testEvaluateRestoreSuccessShouldNotThrowExceptionWhenCalledWithoutRestoring final var backupDirectory = testDataRoot.resolve("backup-dir"); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -315,7 +326,8 @@ void testRestoreDirectoriesShouldThrowExceptionWhenCalledWithNull() throws IOExc final var backupDirectory = testDataRoot.resolve("backup-dir"); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -355,10 +367,15 @@ void testPartialRestoreShouldRestoreFilesToDestinationWhenExecutedWithValidInput final var externalLinkTarget = getExampleResource().toPath().toAbsolutePath(); Files.createSymbolicLink(sourceLinkExternal, externalLinkTarget); - final var backupController = new BackupController(configuration, true); + final var 
parameters = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var restoreTargets = getRestoreTargets(BackupPath.of(sourceDir), restoreDir); final var underTest = new RestorePipeline( @@ -398,7 +415,8 @@ void testDeleteLeftOverFilesShouldThrowExceptionWhenCalledWithNullThreadPool() t final var backupDirectory = testDataRoot.resolve("backup-dir"); final var restoreDirectory = testDataRoot.resolve("restore-dir"); final var manifest = backupController.getManifest(); - final var restoreManifest = new ManifestManagerImpl().mergeForRestore(new TreeMap<>(Map.of(0, manifest))); + final var restoreManifest = new ManifestManagerImpl(NoOpProgressTracker.INSTANCE) + .mergeForRestore(new TreeMap<>(Map.of(0, manifest))); final var sourceDirectory = getSourceDirectory(backupController); final var restoreTargets = getRestoreTargets(sourceDirectory, restoreDirectory); @@ -427,7 +445,11 @@ private BackupController executeABackup() throws IOException { final var backup = testDataRoot.resolve("backup-dir" + UUID.randomUUID()); final var configuration = getBackupJobConfiguration(source, backup); FileUtils.copyFile(getExampleResource(), source.resolve("A.png").toFile()); - final var backupController = new BackupController(configuration, true); + final var parameters = BackupParameters.builder() + .job(configuration) + .forceFull(true) + .build(); + final var backupController = new BackupController(parameters); backupController.execute(1); return backupController; } diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/Controller.java 
b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/Controller.java index cc8dd67..c855a62 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/Controller.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/Controller.java @@ -2,14 +2,19 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.github.nagyesta.filebarj.core.backup.pipeline.BackupController; +import com.github.nagyesta.filebarj.core.backup.pipeline.BackupParameters; import com.github.nagyesta.filebarj.core.config.BackupJobConfiguration; import com.github.nagyesta.filebarj.core.config.RestoreTarget; import com.github.nagyesta.filebarj.core.config.RestoreTargets; import com.github.nagyesta.filebarj.core.config.RestoreTask; import com.github.nagyesta.filebarj.core.delete.IncrementDeletionController; +import com.github.nagyesta.filebarj.core.delete.IncrementDeletionParameters; import com.github.nagyesta.filebarj.core.inspect.pipeline.IncrementInspectionController; +import com.github.nagyesta.filebarj.core.inspect.pipeline.InspectParameters; import com.github.nagyesta.filebarj.core.merge.MergeController; +import com.github.nagyesta.filebarj.core.merge.MergeParameters; import com.github.nagyesta.filebarj.core.restore.pipeline.RestoreController; +import com.github.nagyesta.filebarj.core.restore.pipeline.RestoreParameters; import com.github.nagyesta.filebarj.io.stream.crypto.EncryptionUtil; import com.github.nagyesta.filebarj.job.cli.*; import com.github.nagyesta.filebarj.job.util.KeyStoreUtil; @@ -119,7 +124,12 @@ protected void doInspectContent(final InspectIncrementContentsProperties propert final var startTimeMillis = System.currentTimeMillis(); log.info("Bootstrapping inspect content operation..."); final var pointInTimeEpochSeconds = properties.getPointInTimeEpochSeconds(); - new IncrementInspectionController(properties.getBackupSource(), properties.getPrefix(), kek) + new IncrementInspectionController( + InspectParameters.builder() + 
.backupDirectory(properties.getBackupSource()) + .fileNamePrefix(properties.getPrefix()) + .kek(kek) + .build()) .inspectContent(pointInTimeEpochSeconds, properties.getOutputFile()); final var endTimeMillis = System.currentTimeMillis(); final var durationMillis = (endTimeMillis - startTimeMillis); @@ -130,7 +140,12 @@ protected void doInspectIncrements(final InspectIncrementsProperties properties) final var kek = getPrivateKey(properties.getKeyProperties()); final var startTimeMillis = System.currentTimeMillis(); log.info("Bootstrapping inspect increments operation..."); - new IncrementInspectionController(properties.getBackupSource(), properties.getPrefix(), kek) + new IncrementInspectionController( + InspectParameters.builder() + .backupDirectory(properties.getBackupSource()) + .fileNamePrefix(properties.getPrefix()) + .kek(kek) + .build()) .inspectIncrements(System.out); final var endTimeMillis = System.currentTimeMillis(); final var durationMillis = (endTimeMillis - startTimeMillis); @@ -141,7 +156,12 @@ protected void doDeleteIncrements(final DeleteIncrementsProperties properties) { final var kek = getPrivateKey(properties.getKeyProperties()); final var startTimeMillis = System.currentTimeMillis(); log.info("Bootstrapping delete increments operation..."); - new IncrementDeletionController(properties.getBackupSource(), properties.getPrefix(), kek) + new IncrementDeletionController( + IncrementDeletionParameters.builder() + .backupDirectory(properties.getBackupSource()) + .fileNamePrefix(properties.getPrefix()) + .kek(kek) + .build()) .deleteIncrementsUntilNextFullBackupAfter(properties.getAfterEpochSeconds()); final var endTimeMillis = System.currentTimeMillis(); final var durationMillis = (endTimeMillis - startTimeMillis); @@ -175,7 +195,13 @@ protected void doRestore(final RestoreProperties properties) { .includedPath(properties.getIncludedPath()) .permissionComparisonStrategy(properties.getPermissionComparisonStrategy()) .build(); - new 
RestoreController(properties.getBackupSource(), properties.getPrefix(), kek, properties.getPointInTimeEpochSeconds()) + new RestoreController( + RestoreParameters.builder() + .backupDirectory(properties.getBackupSource()) + .fileNamePrefix(properties.getPrefix()) + .kek(kek) + .atPointInTime(properties.getPointInTimeEpochSeconds()) + .build()) .execute(restoreTask); final var endTimeMillis = System.currentTimeMillis(); final var durationMillis = (endTimeMillis - startTimeMillis); @@ -186,8 +212,14 @@ protected void doMerge(final MergeProperties properties) { final var kek = getPrivateKey(properties.getKeyProperties()); final var startTimeMillis = System.currentTimeMillis(); log.info("Bootstrapping merge operation..."); - new MergeController(properties.getBackupSource(), properties.getPrefix(), kek, - properties.getFromTimeEpochSeconds(), properties.getToTimeEpochSeconds()) + new MergeController( + MergeParameters.builder() + .backupDirectory(properties.getBackupSource()) + .fileNamePrefix(properties.getPrefix()) + .kek(kek) + .rangeStartEpochSeconds(properties.getFromTimeEpochSeconds()) + .rangeEndEpochSeconds(properties.getToTimeEpochSeconds()) + .build()) .execute(properties.isDeleteObsoleteFiles()); final var endTimeMillis = System.currentTimeMillis(); final var durationMillis = (endTimeMillis - startTimeMillis); @@ -198,7 +230,11 @@ protected void doBackup(final BackupProperties properties) throws IOException { final var config = new ObjectMapper().reader().readValue(properties.getConfig().toFile(), BackupJobConfiguration.class); final var startTimeMillis = System.currentTimeMillis(); log.info("Bootstrapping backup operation..."); - new BackupController(config, properties.isForceFullBackup()) + new BackupController( + BackupParameters.builder() + .job(config) + .forceFull(properties.isForceFullBackup()) + .build()) .execute(properties.getThreads()); final var endTimeMillis = System.currentTimeMillis(); final var durationMillis = (endTimeMillis - 
startTimeMillis); @@ -216,8 +252,7 @@ private void printBanner() throws IOException { .forEach(System.out::println); } - @Nullable - private PrivateKey getPrivateKey(final KeyStoreProperties keyProperties) { + private @Nullable PrivateKey getPrivateKey(final KeyStoreProperties keyProperties) { return Optional.ofNullable(keyProperties) .map(keyStoreProperties -> KeyStoreUtil .readPrivateKey( diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/BackupFileProperties.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/BackupFileProperties.java index 70c3c6e..5d05a37 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/BackupFileProperties.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/BackupFileProperties.java @@ -14,11 +14,8 @@ @Data @SuperBuilder public class BackupFileProperties { - @NonNull - private final Path backupSource; - @Valid - private final KeyStoreProperties keyProperties; + private final @NonNull Path backupSource; + private final @Valid KeyStoreProperties keyProperties; @FileNamePrefix - @NonNull - private final String prefix; + private final @NonNull String prefix; } diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/BackupProperties.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/BackupProperties.java index 5fb63e0..f537fbe 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/BackupProperties.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/BackupProperties.java @@ -13,9 +13,7 @@ @Data @Builder public class BackupProperties { - @NonNull - private final Path config; - @Positive - private final int threads; + private final @NonNull Path config; + private final @Positive int threads; private final boolean forceFullBackup; } diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliICommonBackupFileParser.java 
b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliICommonBackupFileParser.java index c6b2fee..d5c8039 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliICommonBackupFileParser.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliICommonBackupFileParser.java @@ -82,8 +82,7 @@ protected Options createOptions() { .desc("Defines the prefix of the backup files inside the backup directory.").build()); } - @Nullable - protected static KeyStoreProperties parseKeyProperties(final Console console, final CommandLine commandLine) { + protected static @Nullable KeyStoreProperties parseKeyProperties(final Console console, final CommandLine commandLine) { KeyStoreProperties keyProperties = null; if (commandLine.hasOption(KEY_STORE)) { final var keyStore = Path.of(commandLine.getOptionValue(KEY_STORE)).toAbsolutePath(); diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/DeleteIncrementsProperties.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/DeleteIncrementsProperties.java index 8bf975b..a3d6d6d 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/DeleteIncrementsProperties.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/DeleteIncrementsProperties.java @@ -12,7 +12,6 @@ @SuperBuilder @EqualsAndHashCode(callSuper = true) public class DeleteIncrementsProperties extends BackupFileProperties { - @Positive - private final long afterEpochSeconds; + private final @Positive long afterEpochSeconds; } diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/GenericCliParser.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/GenericCliParser.java index 4d6ae75..19844af 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/GenericCliParser.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/GenericCliParser.java @@ -14,7 
+14,7 @@ public abstract class GenericCliParser { private static final int MAX_WIDTH = 120; - private T result; + private final T result; /** * Creates a new instance and sets the input arguments. diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/InspectIncrementContentsProperties.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/InspectIncrementContentsProperties.java index 7b772bb..603fb5e 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/InspectIncrementContentsProperties.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/InspectIncrementContentsProperties.java @@ -15,8 +15,6 @@ @SuperBuilder @EqualsAndHashCode(callSuper = true) public class InspectIncrementContentsProperties extends BackupFileProperties { - @Positive - private final long pointInTimeEpochSeconds; - @NonNull - private final Path outputFile; + private final @Positive long pointInTimeEpochSeconds; + private final @NonNull Path outputFile; } diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/KeyStoreProperties.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/KeyStoreProperties.java index d384446..23457db 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/KeyStoreProperties.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/KeyStoreProperties.java @@ -12,10 +12,8 @@ @Data @Builder public class KeyStoreProperties { - @NonNull - private final Path keyStore; + private final @NonNull Path keyStore; private final char[] password; - @NonNull @Builder.Default - private final String alias = "default"; + private final @NonNull String alias = "default"; } diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/RestoreProperties.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/RestoreProperties.java index 75ac81c..e0fcde2 100644 --- 
a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/RestoreProperties.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/RestoreProperties.java @@ -20,15 +20,12 @@ @SuperBuilder @EqualsAndHashCode(callSuper = true) public class RestoreProperties extends BackupFileProperties { - @NonNull - private final Map targets; - @Positive - private final int threads; + private final @NonNull Map targets; + private final @Positive int threads; private final boolean dryRun; private final boolean deleteFilesNotInBackup; @PastOrPresentEpochSeconds private final long pointInTimeEpochSeconds; - @Valid - private final BackupPath includedPath; + private final @Valid BackupPath includedPath; private final PermissionComparisonStrategy permissionComparisonStrategy; } diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/util/KeyStoreUtil.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/util/KeyStoreUtil.java index 686c468..70f808f 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/util/KeyStoreUtil.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/util/KeyStoreUtil.java @@ -41,8 +41,8 @@ public final class KeyStoreUtil { * @return the private key */ public static PrivateKey readPrivateKey( - @NotNull final Path source, - @NotNull final String alias, + final @NotNull Path source, + final @NotNull String alias, final char @NotNull [] storePass, final char @NotNull [] keyPass) { try { @@ -62,8 +62,8 @@ public static PrivateKey readPrivateKey( * @return the public key */ public static PublicKey readPublicKey( - @NotNull final Path source, - @NotNull final String alias, + final @NotNull Path source, + final @NotNull String alias, final char @NotNull [] storePass) { try { return KeyStore.getInstance(source.toFile(), storePass) @@ -83,9 +83,9 @@ public static PublicKey readPublicKey( * @param keyPass the password protecting the key */ public static void writeKey( - 
@NotNull final Path target, - @NotNull final String alias, - @NotNull final KeyPair keyPair, + final @NotNull Path target, + final @NotNull String alias, + final @NotNull KeyPair keyPair, final char @NotNull [] storePass, final char @NotNull [] keyPass) { try (var stream = Files.newOutputStream(target)) { @@ -98,9 +98,8 @@ public static void writeKey( } } - @NotNull - private static Certificate[] createChainFor( - @NotNull final KeyPair keyPair) throws Exception { + private static @NotNull Certificate[] createChainFor( + final @NotNull KeyPair keyPair) throws Exception { final var subject = new X500Name("CN=Ignore"); final var now = new Date(Instant.now().toEpochMilli()); final var future = new Date(Instant.now().toEpochMilli() + ONE_HUNDRED_YEARS_IN_MILLIS); @@ -116,8 +115,7 @@ private static Certificate[] createChainFor( return new X509Certificate[]{converter.getCertificate(holder)}; } - @NotNull - private static KeyStore newKeyStore( + private static @NotNull KeyStore newKeyStore( final char @NotNull [] password) throws Exception { final var store = KeyStore.getInstance(PKCS_12); store.load(null, password); diff --git a/file-barj-job/src/main/resources/logback.xml b/file-barj-job/src/main/resources/logback.xml index aee912e..3d9bc30 100644 --- a/file-barj-job/src/main/resources/logback.xml +++ b/file-barj-job/src/main/resources/logback.xml @@ -7,7 +7,7 @@ - %white(%d{HH:mm:ss.SSS}) %boldYellow([%thread]) %highlight(%-5level) %highlight(%logger{36}) - %msg%n%ex{3} + %white(%d{HH:mm:ss.SSS}) %boldYellow([%8.8thread{8}]) %highlight(%-5level) %highlight(%25.25logger{0}) - %msg%n%ex{3} diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveEntryIterator.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveEntryIterator.java index 31b5a78..2073b68 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveEntryIterator.java +++ 
b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveEntryIterator.java @@ -28,8 +28,8 @@ public class BarjCargoArchiveEntryIterator implements Iterator list) throws IOException { + final @NonNull BarjCargoArchiveFileInputStreamSource source, + final @NonNull List list) throws IOException { this.source = source; this.inputStream = source.openStreamForSequentialAccess(); this.iterator = list.listIterator(); @@ -44,9 +44,9 @@ public BarjCargoArchiveEntryIterator( * @throws IOException when the list cannot be read */ public BarjCargoArchiveEntryIterator( - @NonNull final BarjCargoArchiveFileInputStreamSource source, - @NonNull final List relevantFiles, - @NonNull final List list) throws IOException { + final @NonNull BarjCargoArchiveFileInputStreamSource source, + final @NonNull List relevantFiles, + final @NonNull List list) throws IOException { this.source = source; this.inputStream = source.openStreamForSequentialAccess(relevantFiles, list); this.iterator = list.listIterator(); diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSource.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSource.java index 8c5b552..4e90e03 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSource.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSource.java @@ -57,7 +57,7 @@ public class BarjCargoArchiveFileInputStreamSource { * @throws IOException If we cannot access the folder or read from it. * @throws ArchiveIntegrityException If the archive is in an invalid state. 
*/ - public BarjCargoArchiveFileInputStreamSource(@NotNull final BarjCargoInputStreamConfiguration config) + public BarjCargoArchiveFileInputStreamSource(final @NotNull BarjCargoInputStreamConfiguration config) throws IOException, ArchiveIntegrityException { final var folderPath = config.getFolder().toAbsolutePath(); final var indexFile = Path.of(folderPath.toString(), toIndexFileName(config.getPrefix())); @@ -81,7 +81,7 @@ public BarjCargoArchiveFileInputStreamSource(@NotNull final BarjCargoInputStream * @return the entry stored in the archive or {@code null} if not found */ public BarjCargoArchiveEntry getEntry( - @NonNull final String path) { + final @NonNull String path) { return entityIndexes.stream() .filter(e -> e.getPath().equals(path)) .map(index -> new RandomAccessBarjCargoArchiveEntry(this, index)) @@ -108,7 +108,7 @@ public BarjCargoArchiveEntryIterator getIterator() throws IOException { * @throws IOException if an I/O error occurs */ public BarjCargoArchiveEntryIterator getIteratorForScope( - @NonNull final Set archiveEntriesInScope) throws IOException { + final @NonNull Set archiveEntriesInScope) throws IOException { final var orderedMatches = getMatchingEntriesInOrderOfOccurrence(archiveEntriesInScope); if (orderedMatches.isEmpty()) { return new BarjCargoArchiveEntryIterator(this, Collections.emptyList()); @@ -139,9 +139,8 @@ public BarjCargoArchiveEntryIterator getIteratorForScope( * @param archiveEntriesInScope the entries in scope * @return the matching entries */ - @NonNull - public List getMatchingEntriesInOrderOfOccurrence( - @NonNull final Set archiveEntriesInScope) { + public @NonNull List getMatchingEntriesInOrderOfOccurrence( + final @NonNull Set archiveEntriesInScope) { final var normalized = archiveEntriesInScope.stream() .map(FilenameUtils::normalizeNoEndSeparator) .map(FilenameUtils::separatorsToUnix) @@ -160,8 +159,8 @@ public List getMatchingEntriesInOrderOfOccurrence( * @throws IOException If the entry cannot be read */ public 
InputStream getStreamFor( - @NonNull final BarjCargoEntryBoundaries boundary, - @Nullable final SecretKey key) throws IOException { + final @NonNull BarjCargoEntryBoundaries boundary, + final @Nullable SecretKey key) throws IOException { final var files = getFilesFor(boundary); MergingFileInputStream merging = null; InputStream originalDataStream = null; @@ -191,9 +190,9 @@ public InputStream getStreamFor( * @throws IOException If the entry cannot be read */ public InputStream getNextStreamFor( - @NonNull final InputStream mergingInputStream, - @NonNull final BarjCargoEntryBoundaries boundary, - @Nullable final SecretKey key) throws IOException { + final @NonNull InputStream mergingInputStream, + final @NonNull BarjCargoEntryBoundaries boundary, + final @Nullable SecretKey key) throws IOException { InputStream shielded = null; InputStream originalDataStream = null; try { @@ -258,8 +257,8 @@ public InputStream openStreamForSequentialAccess() throws IOException { * @throws IOException If the archive cannot be read */ public InputStream openStreamForSequentialAccess( - @NonNull final List relevantFiles, - @NonNull final List list) throws IOException { + final @NonNull List relevantFiles, + final @NonNull List list) throws IOException { final var fileInputStream = new MergingFileInputStream(relevantFiles); final var start = list.get(0).getContentOrElseMetadata(); final var skip = start.getChunkRelativeStartIndexInclusive(); @@ -277,8 +276,8 @@ public InputStream openStreamForSequentialAccess( * @throws IOException If the index file cannot be read */ protected Properties readProperties( - @NotNull final BarjCargoInputStreamConfiguration config, - @NotNull final Path indexFile) throws IOException { + final @NotNull BarjCargoInputStreamConfiguration config, + final @NotNull Path indexFile) throws IOException { try (var indexStream = new FileInputStream(indexFile.toFile()); var indexBufferedStream = new BufferedInputStream(indexStream); var indexEncryptionStream = 
newCipherInputStream(config.getIndexDecryptionKey()).decorate(indexBufferedStream); @@ -298,9 +297,8 @@ protected Properties readProperties( * @param properties the properties * @return the entity indexes */ - @NotNull - protected List parseEntityIndexes( - @NotNull final Properties properties) { + protected @NotNull List parseEntityIndexes( + final @NotNull Properties properties) { final var index = parse(properties); return LongStream.rangeClosed(1L, index.getTotalEntities()) .mapToObj(BarjCargoUtil::entryIndexPrefix) @@ -315,10 +313,9 @@ protected List parseEntityIndexes( * @param config the configuration * @return the file path map */ - @NotNull - protected SortedMap generateFilePathMap( - @NotNull final Properties properties, - @NotNull final BarjCargoInputStreamConfiguration config) { + protected @NotNull SortedMap generateFilePathMap( + final @NotNull Properties properties, + final @NotNull BarjCargoInputStreamConfiguration config) { final var index = parse(properties); final var map = new TreeMap(); IntStream.rangeClosed(1, index.getNumberOfChunks()) @@ -337,8 +334,8 @@ protected SortedMap generateFilePathMap( * @throws ArchiveIntegrityException If the archive is in an invalid state */ protected void verifyFilesExistAndHaveExpectedSizes( - @NotNull final Properties properties, - @NotNull final SortedMap chunkPaths) throws ArchiveIntegrityException { + final @NotNull Properties properties, + final @NotNull SortedMap chunkPaths) throws ArchiveIntegrityException { final var index = parse(properties); var totalSize = 0L; final var iterator = chunkPaths.keySet().iterator(); @@ -381,7 +378,7 @@ private static ReadOnlyArchiveIndex parse(final Properties properties) { } private void validateEntityIndexes( - @NotNull final List entityIndexes) { + final @NotNull List entityIndexes) { final var paths = entityIndexes.stream() .map(BarjCargoEntityIndex::getPath) .map(FilenameUtils::separatorsToUnix) @@ -404,32 +401,30 @@ private String getExamples(final Set slippingSet) 
{ return examples; } - @NotNull - private List getAllFiles() { + private @NotNull List getAllFiles() { return chunkPaths.values().stream() .sorted() .toList(); } - @NotNull - private List> restoreTransformationSteps( - @Nullable final SecretKey key, final long skipBytes, final long length) { + private @NotNull List> restoreTransformationSteps( + final @Nullable SecretKey key, final long skipBytes, final long length) { return List.of(input -> new FixedRangeInputStream(input, skipBytes, length), newCipherInputStream(key), decompressionFunction ); } - private List getFilesFor(@NotNull final BarjCargoEntryBoundaries boundary) { + private List getFilesFor(final @NotNull BarjCargoEntryBoundaries boundary) { return chunkPaths.subMap(boundary.getStartChunkName(), boundary.getEndChunkName() + "_include_end") .values().stream() .toList(); } private boolean isArchiveHashValid( - @NotNull final String path, - @NotNull final BarjCargoEntryBoundaries entry, - @NotNull final MergingFileInputStream mergingStream) throws IOException { + final @NotNull String path, + final @NotNull BarjCargoEntryBoundaries entry, + final @NotNull MergingFileInputStream mergingStream) throws IOException { try (var digestCalculatorStream = new OptionalDigestOutputStream(OutputStream.nullOutputStream(), hashAlgorithm)) { final var remaining = entry.getArchivedSizeBytes(); copyNBytes(mergingStream, digestCalculatorStream, remaining); @@ -446,8 +441,8 @@ private boolean isArchiveHashValid( } private void copyNBytes( - @NotNull final MergingFileInputStream from, - @NotNull final OptionalDigestOutputStream to, final long n) throws IOException { + final @NotNull MergingFileInputStream from, + final @NotNull OptionalDigestOutputStream to, final long n) throws IOException { for (var i = n; i > 0; i -= MEBIBYTE) { final var bufferSize = (int) Math.min(MEBIBYTE, i); to.write(from.readNBytes(bufferSize)); diff --git 
a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java index a78f858..1db8c70 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java @@ -37,7 +37,7 @@ public class BarjCargoArchiverFileOutputStream extends BaseBarjCargoArchiverFile * @throws IOException If we cannot create the folder or write to it. */ public BarjCargoArchiverFileOutputStream( - @NotNull final BarjCargoOutputStreamConfiguration config) throws IOException { + final @NotNull BarjCargoOutputStreamConfiguration config) throws IOException { super(config); this.indexFile = doCreateFile(toIndexFileName(config.getPrefix())); this.indexStream = new FileOutputStream(indexFile.toFile()); @@ -86,7 +86,7 @@ protected void doOnEntityClosed(final @Nullable BarjCargoEntityIndex entityToInd } } - private void writeEntityToIndex(@NotNull final BarjCargoEntityIndex entityIndex) throws IOException { + private void writeEntityToIndex(final @NotNull BarjCargoEntityIndex entityIndex) throws IOException { try { final var prefix = entryIndexPrefix(entryCount()); this.indexStreamWriter.write(entityIndex.toProperties(prefix)); diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoInputStreamConfiguration.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoInputStreamConfiguration.java index ba4b7fb..4e43d7c 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoInputStreamConfiguration.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoInputStreamConfiguration.java @@ -17,18 +17,15 @@ public class 
BarjCargoInputStreamConfiguration { /** * The folder where the archive's parts are stored. */ - @NonNull - private final Path folder; + private final @NonNull Path folder; /** * The prefix of the archive's parts. */ - @NonNull - private final String prefix; + private final @NonNull String prefix; /** * The function used to compress the archived data. */ - @NonNull - private final IoFunction compressionFunction; + private final @NonNull IoFunction compressionFunction; /** * The algorithm used to hash the entries. */ diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoOutputStreamConfiguration.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoOutputStreamConfiguration.java index 2783e72..2fe3a53 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoOutputStreamConfiguration.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoOutputStreamConfiguration.java @@ -18,18 +18,15 @@ public class BarjCargoOutputStreamConfiguration { /** * The folder where the archive's parts should be stored. */ - @NonNull - private final Path folder; + private final @NonNull Path folder; /** * The prefix of the archive's parts. */ - @NonNull - private final String prefix; + private final @NonNull String prefix; /** * The function used to compress the archived data. */ - @NonNull - private final IoFunction compressionFunction; + private final @NonNull IoFunction compressionFunction; /** * The algorithm used to hash the entries. 
*/ diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoUtil.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoUtil.java index bf3d179..28bcb24 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoUtil.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoUtil.java @@ -34,7 +34,7 @@ public final class BarjCargoUtil { * @return The chunk file name */ public static String toChunkFileName( - @NonNull final String prefix, final int counter) { + final @NonNull String prefix, final int counter) { if (counter <= 0) { throw new IllegalArgumentException("Invalid counter: " + counter); } @@ -48,7 +48,7 @@ public static String toChunkFileName( * @return The chunk file name */ public static String toIndexFileName( - @NonNull final String prefix) { + final @NonNull String prefix) { return String.format("%s%s%s", prefix, INDEX, CARGO); } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BasicBarjCargoBoundarySource.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BasicBarjCargoBoundarySource.java index 8e46a90..6a00bcf 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BasicBarjCargoBoundarySource.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BasicBarjCargoBoundarySource.java @@ -10,12 +10,9 @@ @Data public class BasicBarjCargoBoundarySource implements BarjCargoBoundarySource { - @NonNull - private final String path; - @NonNull - private final FileType fileType; + private final @NonNull String path; + private final @NonNull FileType fileType; private final boolean encrypted; private final BarjCargoEntryBoundaries contentBoundary; - @NonNull - private final BarjCargoEntryBoundaries metadataBoundary; + private final @NonNull BarjCargoEntryBoundaries metadataBoundary; } diff --git 
a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/IndexVersion.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/IndexVersion.java index f174355..e78db5b 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/IndexVersion.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/IndexVersion.java @@ -17,7 +17,7 @@ public enum IndexVersion { */ V1("1") { @Override - ReadOnlyArchiveIndex createIndex(@NotNull final Properties properties) { + ReadOnlyArchiveIndex createIndex(final @NotNull Properties properties) { return new ArchiveIndexV1(properties); } }, @@ -26,7 +26,7 @@ ReadOnlyArchiveIndex createIndex(@NotNull final Properties properties) { */ V2("2") { @Override - ReadOnlyArchiveIndex createIndex(@NotNull final Properties properties) { + ReadOnlyArchiveIndex createIndex(final @NotNull Properties properties) { return new ArchiveIndexV2(properties); } }; diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ParallelBarjCargoArchiverFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ParallelBarjCargoArchiverFileOutputStream.java index 8afad8c..25b1237 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ParallelBarjCargoArchiverFileOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ParallelBarjCargoArchiverFileOutputStream.java @@ -46,8 +46,8 @@ public class ParallelBarjCargoArchiverFileOutputStream extends BarjCargoArchiver * @throws IOException If we cannot create the folder or write to it. 
*/ public ParallelBarjCargoArchiverFileOutputStream( - @NotNull final BarjCargoOutputStreamConfiguration config, final int threads) - throws IOException { + final @NotNull BarjCargoOutputStreamConfiguration config, + final int threads) throws IOException { super(config); this.tempFileConfig = BarjCargoOutputStreamConfiguration.builder() .folder(config.getFolder().resolve(TEMP_DIR_NAME)) @@ -68,8 +68,9 @@ public ParallelBarjCargoArchiverFileOutputStream( * @return An object with the entity boundaries */ public CompletableFuture addFileEntityAsync( - @NotNull final String path, @NotNull final InputStream contentStream, - @Nullable final SecretKey encryptionKey) { + final @NotNull String path, + final @NotNull InputStream contentStream, + final @Nullable SecretKey encryptionKey) { return addFileEntityAsync(path, contentStream, encryptionKey, null); } @@ -83,8 +84,10 @@ public CompletableFuture addFileEntityAsync( * @return An object with the entity boundaries */ public CompletableFuture addFileEntityAsync( - @NotNull final String path, @NonNull final InputStream contentStream, - @Nullable final SecretKey encryptionKey, @Nullable final String metadata) { + final @NotNull String path, + final @NonNull InputStream contentStream, + final @Nullable SecretKey encryptionKey, + final @Nullable String metadata) { normalizeAndValidateUniquePathForAsyncCalls(path, FileType.REGULAR_FILE); final var tempStream = new AtomicReference(); return CompletableFuture.supplyAsync(() -> { @@ -110,8 +113,9 @@ public CompletableFuture addFileEntityAsync( * @return An object with the entity boundaries */ public CompletableFuture addSymbolicLinkEntityAsync( - @NotNull final String path, @NotNull final String linkTargetPath, - @Nullable final SecretKey encryptionKey) { + final @NotNull String path, + final @NotNull String linkTargetPath, + final @Nullable SecretKey encryptionKey) { return addSymbolicLinkEntityAsync(path, linkTargetPath, encryptionKey, null); } @@ -125,8 +129,10 @@ public 
CompletableFuture addSymbolicLinkEntityAsync( * @return An object with the entity boundaries */ public CompletableFuture addSymbolicLinkEntityAsync( - @NotNull final String path, @NonNull final String linkTargetPath, - @Nullable final SecretKey encryptionKey, @Nullable final String metadata) { + final @NotNull String path, + final @NonNull String linkTargetPath, + final @Nullable SecretKey encryptionKey, + final @Nullable String metadata) { normalizeAndValidateUniquePathForAsyncCalls(path, FileType.SYMBOLIC_LINK); final var tempStream = new AtomicReference(); return CompletableFuture.supplyAsync(() -> { @@ -151,7 +157,8 @@ public CompletableFuture addSymbolicLinkEntityAsync( * @return An object with the entity boundaries */ public CompletableFuture addDirectoryEntityAsync( - @NotNull final String path, @Nullable final SecretKey encryptionKey) { + final @NotNull String path, + final @Nullable SecretKey encryptionKey) { return addDirectoryEntityAsync(path, encryptionKey, null); } @@ -164,8 +171,9 @@ public CompletableFuture addDirectoryEntityAsync( * @return An object with the entity boundaries */ public CompletableFuture addDirectoryEntityAsync( - @NotNull final String path, @Nullable final SecretKey encryptionKey, - @Nullable final String metadata) { + final @NotNull String path, + final @Nullable SecretKey encryptionKey, + final @Nullable String metadata) { normalizeAndValidateUniquePathForAsyncCalls(path, FileType.DIRECTORY); return CompletableFuture.supplyAsync(() -> { try { @@ -185,8 +193,8 @@ public CompletableFuture addDirectoryEntityAsync( * @return The boundary of the entity */ public CompletableFuture mergeEntityAsync( - @NonNull final BarjCargoBoundarySource boundaryMetadata, - @NonNull final InputStream contentAndMetadataStream) { + final @NonNull BarjCargoBoundarySource boundaryMetadata, + final @NonNull InputStream contentAndMetadataStream) { return CompletableFuture.supplyAsync(() -> { try { return super.mergeEntity(boundaryMetadata, 
contentAndMetadataStream); @@ -198,8 +206,8 @@ public CompletableFuture mergeEntityAsync( @Override public BarjCargoBoundarySource addFileEntity( - @NotNull final String path, @NotNull final InputStream contentStream, - @Nullable final SecretKey encryptionKey) + final @NotNull String path, final @NotNull InputStream contentStream, + final @Nullable SecretKey encryptionKey) throws IOException { try { return this.addFileEntityAsync(path, contentStream, encryptionKey).join(); @@ -211,9 +219,10 @@ public BarjCargoBoundarySource addFileEntity( @Override public BarjCargoBoundarySource addFileEntity( - @NotNull final String path, @NotNull final InputStream contentStream, - @Nullable final SecretKey encryptionKey, @Nullable final String metadata) - throws IOException { + final @NotNull String path, + final @NotNull InputStream contentStream, + final @Nullable SecretKey encryptionKey, + final @Nullable String metadata) throws IOException { try { return this.addFileEntityAsync(path, contentStream, encryptionKey, metadata).join(); } catch (final CompletionException ex) { @@ -224,9 +233,9 @@ public BarjCargoBoundarySource addFileEntity( @Override public BarjCargoBoundarySource addSymbolicLinkEntity( - @NotNull final String path, @NotNull final String linkTargetPath, - @Nullable final SecretKey encryptionKey) - throws IOException { + final @NotNull String path, + final @NotNull String linkTargetPath, + final @Nullable SecretKey encryptionKey) throws IOException { try { return this.addSymbolicLinkEntityAsync(path, linkTargetPath, encryptionKey).join(); } catch (final CompletionException ex) { @@ -237,9 +246,10 @@ public BarjCargoBoundarySource addSymbolicLinkEntity( @Override public BarjCargoBoundarySource addSymbolicLinkEntity( - @NotNull final String path, @NotNull final String linkTargetPath, - @Nullable final SecretKey encryptionKey, @Nullable final String metadata) - throws IOException { + final @NotNull String path, + final @NotNull String linkTargetPath, + final @Nullable 
SecretKey encryptionKey, + final @Nullable String metadata) throws IOException { try { return this.addSymbolicLinkEntityAsync(path, linkTargetPath, encryptionKey, metadata).join(); } catch (final CompletionException ex) { @@ -250,8 +260,8 @@ public BarjCargoBoundarySource addSymbolicLinkEntity( @Override public BarjCargoBoundarySource addDirectoryEntity( - @NotNull final String path, - @Nullable final SecretKey encryptionKey) throws IOException { + final @NotNull String path, + final @Nullable SecretKey encryptionKey) throws IOException { try { return this.addDirectoryEntityAsync(path, encryptionKey).join(); } catch (final CompletionException ex) { @@ -262,9 +272,9 @@ public BarjCargoBoundarySource addDirectoryEntity( @Override public BarjCargoBoundarySource addDirectoryEntity( - @NotNull final String path, - @Nullable final SecretKey encryptionKey, - @Nullable final String metadata) throws IOException { + final @NotNull String path, + final @Nullable SecretKey encryptionKey, + final @Nullable String metadata) throws IOException { try { return this.addDirectoryEntityAsync(path, encryptionKey, metadata).join(); } catch (final CompletionException ex) { @@ -275,8 +285,8 @@ public BarjCargoBoundarySource addDirectoryEntity( @Override public BarjCargoBoundarySource mergeEntity( - @NotNull final BarjCargoBoundarySource boundaryMetadata, - @NotNull final InputStream contentAndMetadataStream) throws IOException { + final @NotNull BarjCargoBoundarySource boundaryMetadata, + final @NotNull InputStream contentAndMetadataStream) throws IOException { try { return this.mergeEntityAsync(boundaryMetadata, contentAndMetadataStream).join(); } catch (final CompletionException ex) { @@ -294,7 +304,8 @@ public void close() throws IOException { } private void normalizeAndValidateUniquePathForAsyncCalls( - @NotNull final String path, @NotNull final FileType fileType) { + final @NotNull String path, + final @NotNull FileType fileType) { final var entityPath = normalizeEntityPath(path); 
assertEntityNameIsValidAndUnique(asyncEntityPaths, entityPath, fileType); } @@ -309,9 +320,8 @@ private void unwrapIoException(final CompletionException ex) throws IOException } } - @NotNull - private Function mergeEntityFromTempStreamAsync( - @NotNull final AtomicReference tempStream) { + private @NotNull Function mergeEntityFromTempStreamAsync( + final @NotNull AtomicReference tempStream) { return entity -> { try { final var stream = tempStream.get(); @@ -338,8 +348,8 @@ private Function mergeEntityFr } private void autoCreateDirectories( - @NotNull final String path, - @NotNull final BaseBarjCargoArchiverFileOutputStream stream) throws IOException { + final @NotNull String path, + final @NotNull BaseBarjCargoArchiverFileOutputStream stream) throws IOException { final var normalizedPath = normalizeEntityPath(path); if (normalizedPath != null) { final var tokens = normalizedPath.split(SLASH); diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/crypto/EncryptionUtil.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/crypto/EncryptionUtil.java index 2124db9..30b9941 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/crypto/EncryptionUtil.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/crypto/EncryptionUtil.java @@ -61,7 +61,7 @@ public class EncryptionUtil { * @return the cipher */ public static Cipher createCipher( - @NonNull final SecretKey secretKey, final byte[] ivBytes, final int mode) { + final @NonNull SecretKey secretKey, final byte[] ivBytes, final int mode) { try { final var iv = new GCMParameterSpec(GCM_TAG_LENGTH_BITS, ivBytes); final var cipher = Cipher.getInstance(AES_GCM, BOUNCY_CASTLE_PROVIDER); @@ -79,7 +79,7 @@ public static Cipher createCipher( * @param encrypted the byte array to be decrypted * @return the decrypted byte array */ - public byte[] decryptBytes(@NonNull final PrivateKey privateKey, final byte[] 
encrypted) { + public byte[] decryptBytes(final @NonNull PrivateKey privateKey, final byte[] encrypted) { try { final var cipher = Cipher.getInstance(RSA_ALG, BOUNCY_CASTLE_PROVIDER); final var oaepParam = new OAEPParameterSpec(SHA_256, MGF_1, SHA256, DEFAULT); @@ -97,7 +97,7 @@ public byte[] decryptBytes(@NonNull final PrivateKey privateKey, final byte[] en * @param bytes the byte array to be encrypted * @return the encrypted byte array */ - public byte[] encryptBytes(@NonNull final PublicKey publicKey, final byte[] bytes) { + public byte[] encryptBytes(final @NonNull PublicKey publicKey, final byte[] bytes) { try { final var cipher = Cipher.getInstance(RSA_ALG, BOUNCY_CASTLE_PROVIDER); final var oaepParam = new OAEPParameterSpec(SHA_256, MGF_1, SHA256, DEFAULT); @@ -135,7 +135,7 @@ public static byte[] generateSecureRandomBytes() { * * @param key the key */ - public static void verifyKeyIsAes256(@NonNull final SecretKey key) { + public static void verifyKeyIsAes256(final @NonNull SecretKey key) { if (key.getEncoded().length < KEY_SIZE_BYTES) { throw new CryptoException("Key must be AES-256."); } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV1.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV1.java index db06326..1711b08 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV1.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV1.java @@ -24,7 +24,7 @@ public class ArchiveIndexV1 implements ReadOnlyArchiveIndex { private final long lastChunkSizeInBytes; private final long totalSize; - public ArchiveIndexV1(@NotNull final Properties properties) { + public ArchiveIndexV1(final @NotNull Properties properties) { this.properties = properties; this.indexVersion = IndexVersion.forVersionString(properties.getProperty(INDEX_VERSION)); this.totalEntities = 
Long.parseLong(properties.getProperty(LAST_ENTITY_INDEX_PROPERTY)); @@ -35,7 +35,7 @@ public ArchiveIndexV1(@NotNull final Properties properties) { } @Override - public BarjCargoEntityIndex entity(@NotNull final String prefix) { + public BarjCargoEntityIndex entity(final @NotNull String prefix) { return BarjCargoEntityIndex.fromProperties(properties, prefix); } } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV2.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV2.java index 18725af..64a3426 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV2.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/index/ArchiveIndexV2.java @@ -28,7 +28,7 @@ public class ArchiveIndexV2 implements ReadOnlyArchiveIndex { private final long lastChunkSizeInBytes; private final long totalSize; - public ArchiveIndexV2(@NotNull final Properties properties) { + public ArchiveIndexV2(final @NotNull Properties properties) { this.properties = properties; this.indexVersion = IndexVersion.forVersionString(properties.getProperty(INDEX_VERSION)); this.totalEntities = Long.parseLong(properties.getProperty(LAST_ENTITY_INDEX_PROPERTY)); @@ -55,7 +55,7 @@ public ArchiveIndexV2( } @Override - public BarjCargoEntityIndex entity(@NotNull final String prefix) { + public BarjCargoEntityIndex entity(final @NotNull String prefix) { return BarjCargoEntityIndex.fromProperties(properties, prefix); } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ArchiveEntryOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ArchiveEntryOutputStream.java index eb71ec2..626f1fa 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ArchiveEntryOutputStream.java +++ 
b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ArchiveEntryOutputStream.java @@ -31,8 +31,8 @@ public class ArchiveEntryOutputStream extends DoOnCloseOutputStream { * @throws IOException When the stream cannot be decorated. */ public ArchiveEntryOutputStream( - @NonNull final BaseBarjCargoArchiverFileOutputStream destinationStream, - @NonNull final IoFunction encryptionFunction) throws IOException { + final @NonNull BaseBarjCargoArchiverFileOutputStream destinationStream, + final @NonNull IoFunction encryptionFunction) throws IOException { this.destinationStream = destinationStream; // save boundary information before anything could interfere with the stream @@ -74,9 +74,8 @@ public BarjCargoEntryBoundaries getEntityBoundary() { return boundaries; } - @NotNull @Override - protected OutputStream getOutputStream() { + protected @NotNull OutputStream getOutputStream() { return originalDigestStream; } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/BaseBarjCargoArchiverFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/BaseBarjCargoArchiverFileOutputStream.java index 6dd9672..fbdfae8 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/BaseBarjCargoArchiverFileOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/BaseBarjCargoArchiverFileOutputStream.java @@ -56,7 +56,7 @@ public class BaseBarjCargoArchiverFileOutputStream extends ChunkingFileOutputStr * @throws IOException If we cannot create the folder or write to it. 
*/ public BaseBarjCargoArchiverFileOutputStream( - @NotNull final BarjCargoOutputStreamConfiguration config) throws IOException { + final @NotNull BarjCargoOutputStreamConfiguration config) throws IOException { super(config.getFolder(), config.getPrefix(), config.getMaxFileSizeMebibyte()); this.compressionFunction = config.getCompressionFunction(); this.hashAlgorithm = config.getHashAlgorithm(); @@ -73,9 +73,9 @@ public BaseBarjCargoArchiverFileOutputStream( * @throws IOException When an IO exception occurs during the write operation */ public BarjCargoBoundarySource addFileEntity( - @NotNull final String path, - @NotNull final InputStream contentStream, - @Nullable final SecretKey encryptionKey) throws IOException { + final @NotNull String path, + final @NotNull InputStream contentStream, + final @Nullable SecretKey encryptionKey) throws IOException { return addFileEntity(path, contentStream, encryptionKey, null); } @@ -91,10 +91,10 @@ public BarjCargoBoundarySource addFileEntity( * @throws IOException When an IO exception occurs during the write operation */ public BarjCargoBoundarySource addFileEntity( - @NotNull final String path, - @NonNull final InputStream contentStream, - @Nullable final SecretKey encryptionKey, - @Nullable final String metadata) throws IOException { + final @NotNull String path, + final @NonNull InputStream contentStream, + final @Nullable SecretKey encryptionKey, + final @Nullable String metadata) throws IOException { try (var entity = openEntity(path, FileType.REGULAR_FILE, encryptionKey)) { writeContent(contentStream); writeMetadata(metadata); @@ -114,9 +114,9 @@ public BarjCargoBoundarySource addFileEntity( * @throws IOException When an IO exception occurs during the write operation */ public BarjCargoBoundarySource addSymbolicLinkEntity( - @NotNull final String path, - @NotNull final String linkTargetPath, - @Nullable final SecretKey encryptionKey) throws IOException { + final @NotNull String path, + final @NotNull String 
linkTargetPath, + final @Nullable SecretKey encryptionKey) throws IOException { return addSymbolicLinkEntity(path, linkTargetPath, encryptionKey, null); } @@ -132,10 +132,10 @@ public BarjCargoBoundarySource addSymbolicLinkEntity( * @throws IOException When an IO exception occurs during the write operation */ public BarjCargoBoundarySource addSymbolicLinkEntity( - @NotNull final String path, - @NonNull final String linkTargetPath, - @Nullable final SecretKey encryptionKey, - @Nullable final String metadata) throws IOException { + final @NotNull String path, + final @NonNull String linkTargetPath, + final @Nullable SecretKey encryptionKey, + final @Nullable String metadata) throws IOException { try (var entity = openEntity(path, FileType.SYMBOLIC_LINK, encryptionKey)) { writeContent(new ByteArrayInputStream(linkTargetPath.getBytes(StandardCharsets.UTF_8))); writeMetadata(metadata); @@ -154,8 +154,8 @@ public BarjCargoBoundarySource addSymbolicLinkEntity( * @throws IOException When an IO exception occurs during the write operation */ public BarjCargoBoundarySource addDirectoryEntity( - @NotNull final String path, - @Nullable final SecretKey encryptionKey) throws IOException { + final @NotNull String path, + final @Nullable SecretKey encryptionKey) throws IOException { return addDirectoryEntity(path, encryptionKey, null); } @@ -170,9 +170,9 @@ public BarjCargoBoundarySource addDirectoryEntity( * @throws IOException When an IO exception occurs during the write operation */ public BarjCargoBoundarySource addDirectoryEntity( - @NotNull final String path, - @Nullable final SecretKey encryptionKey, - @Nullable final String metadata) throws IOException { + final @NotNull String path, + final @Nullable SecretKey encryptionKey, + final @Nullable String metadata) throws IOException { try (var entity = openEntity(path, FileType.DIRECTORY, encryptionKey)) { writeMetadata(metadata); closeCurrentEntity(); @@ -190,8 +190,8 @@ public BarjCargoBoundarySource addDirectoryEntity( * 
@throws IOException When an IO exception occurs during the write operation */ public BarjCargoBoundarySource mergeEntity( - @NonNull final BarjCargoBoundarySource boundaryMetadata, - @NotNull final InputStream contentAndMetadataStream) throws IOException { + final @NonNull BarjCargoBoundarySource boundaryMetadata, + final @NotNull InputStream contentAndMetadataStream) throws IOException { if (this.hasOpenEntity()) { throw new IllegalStateException("Entity is already open."); } @@ -258,9 +258,8 @@ public void close() throws IOException { * @param fileType The file type * @return The normalized and validated path */ - @Nullable - protected String normalizeAndValidateUniquePath( - @NotNull final String path, @NotNull final FileType fileType) { + protected @Nullable String normalizeAndValidateUniquePath( + final @NotNull String path, final @NotNull FileType fileType) { final var entityPath = normalizeEntityPath(path); assertEntityNameIsValidAndUnique(entityPaths, entityPath, fileType); return entityPath; @@ -306,7 +305,7 @@ protected long entryCount() { * @see #addDirectoryEntity(String, SecretKey, String) */ protected BarjCargoEntityArchiver openEntity( - @NotNull final String archiveEntityPath, @NotNull final FileType fileType, @Nullable final SecretKey encryptionKey) { + final @NotNull String archiveEntityPath, final @NotNull FileType fileType, final @Nullable SecretKey encryptionKey) { if (this.hasOpenEntity()) { throw new IllegalStateException("Entity is already open."); } @@ -348,12 +347,11 @@ protected void closeCurrentEntity() throws IOException { * @param entityToIndex the closed entity's to index * @throws IOException If the entity could not be closed due to an exception@ */ - protected void doOnEntityClosed(@Nullable final BarjCargoEntityIndex entityToIndex) throws IOException { + protected void doOnEntityClosed(final @Nullable BarjCargoEntityIndex entityToIndex) throws IOException { } - @Nullable - protected String normalizeEntityPath(final String 
archiveEntityPath) { + protected @Nullable String normalizeEntityPath(final String archiveEntityPath) { return separatorsToUnix(normalizeNoEndSeparator(archiveEntityPath)); } @@ -384,9 +382,9 @@ protected void assertEntityNameIsValidAndUnique( } private void doMerge( - @NotNull final BarjCargoEntryBoundaries boundary, - @NotNull final InputStream contentAndMetadataStream, - @NotNull final Consumer resultConsumer) throws IOException { + final @NotNull BarjCargoEntryBoundaries boundary, + final @NotNull InputStream contentAndMetadataStream, + final @NotNull Consumer resultConsumer) throws IOException { final var start = boundary.getAbsoluteStartIndexInclusive(); final var length = boundary.getAbsoluteEndIndexExclusive() - start; final var hash = boundary.getArchivedHash(); @@ -399,8 +397,8 @@ private void doMerge( } private BarjCargoEntryBoundaries mergePart( - @NotNull final BarjCargoEntryBoundaries boundary, - @NonNull final InputStream stream) throws IOException { + final @NotNull BarjCargoEntryBoundaries boundary, + final @NonNull InputStream stream) throws IOException { final var builder = BarjCargoEntryBoundaries.builder() .absoluteStartIndexInclusive(getTotalByteCount()) .startChunkName(getCurrentFilePath().getFileName().toString()) diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ChunkingFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ChunkingFileOutputStream.java index b2d804b..96f631f 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ChunkingFileOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ChunkingFileOutputStream.java @@ -42,8 +42,8 @@ public class ChunkingFileOutputStream extends ChunkingOutputStream { * @throws IOException If we cannot create the folder or write to it. 
*/ public ChunkingFileOutputStream( - @NonNull final Path folder, - @NonNull final String prefix, + final @NonNull Path folder, + final @NonNull String prefix, final int maxFileSizeMebibyte) throws IOException { super(maxFileSizeMebibyte); @@ -60,8 +60,7 @@ public ChunkingFileOutputStream( * * @return files written */ - @NotNull - public List getDataFilesWritten() { + public @NotNull List getDataFilesWritten() { return Collections.unmodifiableList(dataFilesWritten); } @@ -73,8 +72,7 @@ public List getDataFilesWritten() { * @return the path of the file * @throws IOException When the file cannot be created due ot an I/O exception */ - @NotNull - protected Path createDataFile(final String fileName) throws IOException { + protected @NotNull Path createDataFile(final String fileName) throws IOException { final var path = doCreateFile(fileName); this.dataFilesWritten.add(path); return path; @@ -87,8 +85,7 @@ protected Path createDataFile(final String fileName) throws IOException { * @return the path of the file * @throws IOException When the file cannot be created due ot an I/O exception */ - @NotNull - protected Path doCreateFile(final String fileName) throws IOException { + protected @NotNull Path doCreateFile(final String fileName) throws IOException { final var folderName = folder.toAbsolutePath().toString(); final var path = Path.of(folderName, fileName); final var file = path.toFile(); diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ChunkingOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ChunkingOutputStream.java index bd29605..2649174 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ChunkingOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/ChunkingOutputStream.java @@ -93,8 +93,7 @@ public void close() throws IOException { * @return the next stream * @throws 
IOException if we cannot open the next stream */ - @NotNull - protected final OutputStream openNextStream() throws IOException { + protected final @NotNull OutputStream openNextStream() throws IOException { final var byteCount = currentByteCount; byteCountOffset = byteCountOffset + byteCount; currentByteCount = 0L; @@ -108,8 +107,7 @@ protected final OutputStream openNextStream() throws IOException { * @return the next stream * @throws IOException if we cannot open the next stream */ - @NotNull - protected abstract OutputStream doOpenNextStream() throws IOException; + protected abstract @NotNull OutputStream doOpenNextStream() throws IOException; private void doWrite(final byte @NotNull [] b, final int off, final int len) throws IOException { if (len <= 0) { diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CloseShieldingOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CloseShieldingOutputStream.java index 199f9b8..cebb74f 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CloseShieldingOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CloseShieldingOutputStream.java @@ -17,7 +17,7 @@ public class CloseShieldingOutputStream extends OutputStream { * * @param stream The stream to wrap */ - public CloseShieldingOutputStream(@NonNull final OutputStream stream) { + public CloseShieldingOutputStream(final @NonNull OutputStream stream) { this.internal = stream; } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CompositeArchiveStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CompositeArchiveStream.java index 523043b..96c8f20 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CompositeArchiveStream.java +++ 
b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CompositeArchiveStream.java @@ -31,8 +31,8 @@ public class CompositeArchiveStream extends DoOnCloseOutputStream { * @param digestAlgorithm The algorithm we should use for digest calculation. * @throws IOException When the stream cannot be decorated. */ - public CompositeArchiveStream(@NotNull final OutputStream destinationStream, - @Nullable final String digestAlgorithm) throws IOException { + public CompositeArchiveStream(final @NotNull OutputStream destinationStream, + final @Nullable String digestAlgorithm) throws IOException { this(destinationStream, digestAlgorithm, IoFunction.IDENTITY_OUTPUT_STREAM); } @@ -46,9 +46,9 @@ public CompositeArchiveStream(@NotNull final OutputStream destinationStream, * it to the destination. * @throws IOException When the stream cannot be decorated. */ - public CompositeArchiveStream(@NonNull final OutputStream destinationStream, - @Nullable final String digestAlgorithm, - @NonNull final IoFunction transformation) throws IOException { + public CompositeArchiveStream(final @NonNull OutputStream destinationStream, + final @Nullable String digestAlgorithm, + final @NonNull IoFunction transformation) throws IOException { OptionalDigestOutputStream dos = null; CountingOutputStream cos = null; OutputStream ts = null; @@ -86,15 +86,13 @@ public long getByteCount() throws IllegalStateException { * @return The digest value. null if called with the null algorithm. * @throws IllegalStateException When the stream is not closed yet. 
*/ - @Nullable - public String getDigestValue() throws IllegalStateException { + public @Nullable String getDigestValue() throws IllegalStateException { assertClosed(); return digestValue; } - @NotNull @Override - protected OutputStream getOutputStream() { + protected @NotNull OutputStream getOutputStream() { return transformationStream; } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CompositeRestoreStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CompositeRestoreStream.java index d80972d..f3456cc 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CompositeRestoreStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/CompositeRestoreStream.java @@ -37,10 +37,10 @@ public class CompositeRestoreStream extends DoOnCloseInputStream { * read fully. * @throws IOException When the stream cannot be decorated. */ - public CompositeRestoreStream(@NonNull final InputStream sourceStream, - @Nullable final String digestAlgorithm, - @NonNull final List> transformationFunctions, - @Nullable final String expectedDigest) throws IOException { + public CompositeRestoreStream(final @NonNull InputStream sourceStream, + final @Nullable String digestAlgorithm, + final @NonNull List> transformationFunctions, + final @Nullable String expectedDigest) throws IOException { SelfValidatingOptionalDigestInputStream dis = null; BufferedInputStream bis = null; final List ts = new ArrayList<>(); @@ -66,9 +66,8 @@ public CompositeRestoreStream(@NonNull final InputStream sourceStream, } } - @NotNull @Override - protected @NonNull InputStream getInputStream() { + protected @NotNull @NonNull InputStream getInputStream() { return bufferedStream; } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseInputStream.java 
b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseInputStream.java index f746584..22b1eca 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseInputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseInputStream.java @@ -58,8 +58,7 @@ public void close() throws IOException { * * @return the input stream */ - @NotNull - protected abstract InputStream getInputStream(); + protected abstract @NotNull InputStream getInputStream(); /** * Performs some action exactly once when the close method is first called on the stream. diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseOutputStream.java index c112461..d5cd118 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseOutputStream.java @@ -73,8 +73,7 @@ public void close() throws IOException { * * @return the output stream */ - @NotNull - protected abstract OutputStream getOutputStream(); + protected abstract @NotNull OutputStream getOutputStream(); /** * Performs some action exactly once when the close method is first called on the stream. 
diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/FixedRangeInputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/FixedRangeInputStream.java index b2b7be4..705cd61 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/FixedRangeInputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/FixedRangeInputStream.java @@ -25,7 +25,7 @@ public class FixedRangeInputStream extends BoundedInputStream { */ @SuppressWarnings("deprecation") public FixedRangeInputStream( - @NonNull final InputStream source, final long startInclusive, final long length) + final @NonNull InputStream source, final long startInclusive, final long length) throws IOException { super(source); if (length < 0) { diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/MergingFileInputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/MergingFileInputStream.java index f7f2010..ddd7489 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/MergingFileInputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/MergingFileInputStream.java @@ -27,7 +27,7 @@ public class MergingFileInputStream extends MergingInputStream { * @throws IOException If we cannot create the folder or write to it. */ public MergingFileInputStream( - @NotNull final Path folder, @NotNull final String prefix, @NotNull final String extension) + final @NotNull Path folder, final @NotNull String prefix, final @NotNull String extension) throws IOException { //noinspection resource this(Files.list(folder) @@ -44,7 +44,7 @@ public MergingFileInputStream( * @throws IOException If the streams cannot be open. 
*/ public MergingFileInputStream( - @NotNull final List allFiles) throws IOException { + final @NotNull List allFiles) throws IOException { super(allFiles.stream() .sorted(Comparator.comparing(Path::toAbsolutePath)) .map(path -> (IoSupplier) () -> new FileInputStream(path.toFile())) diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/MergingInputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/MergingInputStream.java index 6ee35a7..d766ea6 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/MergingInputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/MergingInputStream.java @@ -35,8 +35,8 @@ public class MergingInputStream extends InputStream { * @throws IOException If we cannot read the sources. */ public MergingInputStream( - @NonNull final List> allStreams, - @Nullable final Long totalBytes) + final @NonNull List> allStreams, + final @Nullable Long totalBytes) throws IOException { this.remainingBytes = Optional.ofNullable(totalBytes).orElse(UNKNOWN); this.chunkIterator = List.copyOf(allStreams).iterator(); diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/SelfValidatingOptionalDigestInputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/SelfValidatingOptionalDigestInputStream.java index d83a085..65a8c46 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/SelfValidatingOptionalDigestInputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/SelfValidatingOptionalDigestInputStream.java @@ -29,9 +29,9 @@ public class SelfValidatingOptionalDigestInputStream extends DigestInputStream { * @param expectedDigest the expected digest of the data read fully. 
*/ public SelfValidatingOptionalDigestInputStream( - @NonNull final InputStream stream, - @Nullable final String algorithm, - @Nullable final String expectedDigest) { + final @NonNull InputStream stream, + final @Nullable String algorithm, + final @Nullable String expectedDigest) { super(stream, Optional.ofNullable(algorithm) .map(DigestUtils::getDigest) .orElse(null)); diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStream.java index b72661f..554b6b3 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStream.java @@ -23,7 +23,7 @@ public class TempBarjCargoArchiverFileOutputStream extends BaseBarjCargoArchiver * @throws IOException If we cannot create the folder or write to it. 
*/ public TempBarjCargoArchiverFileOutputStream( - @NotNull final BarjCargoOutputStreamConfiguration config, final String fileName) + final @NotNull BarjCargoOutputStreamConfiguration config, final String fileName) throws IOException { super(BarjCargoOutputStreamConfiguration.builder() .folder(config.getFolder()) @@ -42,7 +42,7 @@ public TempBarjCargoArchiverFileOutputStream( * @throws IOException If the input stream cannot be created */ public InputStream getStream( - @NonNull final BarjCargoEntryBoundaries content, @NonNull final BarjCargoEntryBoundaries metadata) throws IOException { + final @NonNull BarjCargoEntryBoundaries content, final @NonNull BarjCargoEntryBoundaries metadata) throws IOException { final var start = content.getAbsoluteStartIndexInclusive(); final var length = metadata.getAbsoluteEndIndexExclusive() - start; return new FixedRangeInputStream(new MergingFileInputStream(getDataFilesWritten()), start, length); diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntityIndex.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntityIndex.java index 2c0e34c..64b2851 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntityIndex.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntityIndex.java @@ -28,14 +28,11 @@ public class BarjCargoEntityIndex implements BarjCargoBoundarySource { private static final String CONTENT = ".content"; private static final String METADATA = ".metadata"; - @NonNull - private final String path; - @NonNull - private final FileType fileType; + private final @NonNull String path; + private final @NonNull FileType fileType; private final boolean encrypted; private final BarjCargoEntryBoundaries content; - @NonNull - private final BarjCargoEntryBoundaries metadata; + private final @NonNull 
BarjCargoEntryBoundaries metadata; /** * Returns the first relevant boundaries from the content or metadata part of the entity. diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntryBoundaries.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntryBoundaries.java index f875536..482ae4a 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntryBoundaries.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntryBoundaries.java @@ -33,10 +33,8 @@ public class BarjCargoEntryBoundaries { private final long chunkRelativeStartIndexInclusive; private final long chunkRelativeEndIndexExclusive; - @NonNull - private final String startChunkName; - @NonNull - private final String endChunkName; + private final @NonNull String startChunkName; + private final @NonNull String endChunkName; private final long absoluteStartIndexInclusive; private final long absoluteEndIndexExclusive; private final String originalHash; diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/RandomAccessBarjCargoArchiveEntry.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/RandomAccessBarjCargoArchiveEntry.java index 33025ed..fa45ce2 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/RandomAccessBarjCargoArchiveEntry.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/RandomAccessBarjCargoArchiveEntry.java @@ -24,10 +24,8 @@ @ToString public class RandomAccessBarjCargoArchiveEntry implements BarjCargoArchiveEntry { - @NonNull - private final BarjCargoArchiveFileInputStreamSource source; - @NonNull - private final BarjCargoEntityIndex entityIndex; + private final @NonNull BarjCargoArchiveFileInputStreamSource source; + private 
final @NonNull BarjCargoEntityIndex entityIndex; @Override public String getPath() { @@ -40,8 +38,7 @@ public FileType getFileType() { } @Override - @NotNull - public InputStream getFileContent(@Nullable final SecretKey key) throws IOException { + public @NotNull InputStream getFileContent(final @Nullable SecretKey key) throws IOException { if (getFileType() != FileType.REGULAR_FILE) { throw new IllegalArgumentException("Must be called with a regular file!"); } @@ -49,8 +46,7 @@ public InputStream getFileContent(@Nullable final SecretKey key) throws IOExcept } @Override - @NotNull - public String getLinkTarget(@Nullable final SecretKey key) throws IOException { + public @NotNull String getLinkTarget(final @Nullable SecretKey key) throws IOException { if (getFileType() != FileType.SYMBOLIC_LINK) { throw new IllegalArgumentException("Must be called with a symbolic link!"); } @@ -60,8 +56,7 @@ public String getLinkTarget(@Nullable final SecretKey key) throws IOException { } @Override - @Nullable - public String getMetadata(@Nullable final SecretKey key) throws IOException { + public @Nullable String getMetadata(final @Nullable SecretKey key) throws IOException { if (entityIndex.getMetadata().getOriginalSizeBytes() == 0) { return null; } @@ -71,8 +66,7 @@ public String getMetadata(@Nullable final SecretKey key) throws IOException { } @Override - @NotNull - public InputStream getRawContentAndMetadata() throws IOException { + public @NotNull InputStream getRawContentAndMetadata() throws IOException { final var start = entityIndex.getContentOrElseMetadata().getAbsoluteStartIndexInclusive(); final var length = entityIndex.getMetadata().getAbsoluteEndIndexExclusive() - start; return new FixedRangeInputStream(source.openStreamForSequentialAccess(), start, length); diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/SequentialBarjCargoArchiveEntry.java 
b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/SequentialBarjCargoArchiveEntry.java index 7cc552e..00a9f2c 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/SequentialBarjCargoArchiveEntry.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/SequentialBarjCargoArchiveEntry.java @@ -25,12 +25,10 @@ @ToString public class SequentialBarjCargoArchiveEntry implements BarjCargoArchiveEntry { - @NonNull - private final BarjCargoArchiveFileInputStreamSource source; + private final @NonNull BarjCargoArchiveFileInputStreamSource source; private final BarjCargoArchiveEntryIterator iterator; @Getter - @NonNull - private final BarjCargoEntityIndex entityIndex; + private final @NonNull BarjCargoEntityIndex entityIndex; /** * Creates an instance and prepares it for iteration. @@ -39,9 +37,9 @@ public class SequentialBarjCargoArchiveEntry implements BarjCargoArchiveEntry { * @param iterator The iterator * @param entityIndex The index describing the entry's location in the archive */ - public SequentialBarjCargoArchiveEntry(@NonNull final BarjCargoArchiveFileInputStreamSource source, - @NonNull final BarjCargoArchiveEntryIterator iterator, - @NonNull final BarjCargoEntityIndex entityIndex) { + public SequentialBarjCargoArchiveEntry(final @NonNull BarjCargoArchiveFileInputStreamSource source, + final @NonNull BarjCargoArchiveEntryIterator iterator, + final @NonNull BarjCargoEntityIndex entityIndex) { this.source = source; this.iterator = iterator; this.entityIndex = entityIndex; @@ -58,8 +56,7 @@ public FileType getFileType() { } @Override - @NotNull - public InputStream getFileContent(@Nullable final SecretKey key) throws IOException { + public @NotNull InputStream getFileContent(final @Nullable SecretKey key) throws IOException { if (getFileType() != FileType.REGULAR_FILE) { throw new IllegalArgumentException("Must be called with a regular file!"); } @@ -67,8 +64,7 @@ 
public InputStream getFileContent(@Nullable final SecretKey key) throws IOExcept } @Override - @NotNull - public String getLinkTarget(@Nullable final SecretKey key) throws IOException { + public @NotNull String getLinkTarget(final @Nullable SecretKey key) throws IOException { if (getFileType() != FileType.SYMBOLIC_LINK) { throw new IllegalArgumentException("Must be called with a symbolic link!"); } @@ -78,8 +74,7 @@ public String getLinkTarget(@Nullable final SecretKey key) throws IOException { } @Override - @Nullable - public String getMetadata(@Nullable final SecretKey key) throws IOException { + public @Nullable String getMetadata(final @Nullable SecretKey key) throws IOException { final var metadata = entityIndex.getMetadata(); if (metadata.getArchivedSizeBytes() == 0) { return null; @@ -94,8 +89,7 @@ public String getMetadata(@Nullable final SecretKey key) throws IOException { } @Override - @NotNull - public InputStream getRawContentAndMetadata() throws IOException { + public @NotNull InputStream getRawContentAndMetadata() throws IOException { final var start = entityIndex.getContentOrElseMetadata().getAbsoluteStartIndexInclusive(); final var length = entityIndex.getMetadata().getAbsoluteEndIndexExclusive() - start; return CloseShieldInputStream.wrap(new FixedRangeInputStream(iterator.getStream(), 0, length)); diff --git a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSourceIntegrationTest.java b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSourceIntegrationTest.java index b06deef..10c94ee 100644 --- a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSourceIntegrationTest.java +++ b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiveFileInputStreamSourceIntegrationTest.java @@ -836,7 +836,7 @@ void 
testVerifyHashesShouldReturnWithoutVerificationWhenHashAlgorithmIsNull() th //given final var outConfig = getBarjCargoOutputStreamConfiguration(null, IoFunction.IDENTITY_OUTPUT_STREAM); try (var out = new BarjCargoArchiverFileOutputStream(outConfig)) { - out.addFileEntity("/" + UUID.randomUUID().toString(), new ByteArrayInputStream(RANDOM_DATA), null); + out.addFileEntity("/" + UUID.randomUUID(), new ByteArrayInputStream(RANDOM_DATA), null); } final var inConfig = getBarjCargoInputStreamConfiguration(null, IoFunction.IDENTITY_INPUT_STREAM); @@ -853,7 +853,7 @@ void testVerifyHashesShouldVerifyHashesWhenHashAlgorithmIsSha256() throws IOExce //given final var outConfig = getBarjCargoOutputStreamConfiguration(SHA_256, GzipCompressorOutputStream::new); try (var out = new BarjCargoArchiverFileOutputStream(outConfig)) { - out.addFileEntity("/" + UUID.randomUUID().toString(), new ByteArrayInputStream(RANDOM_DATA), null); + out.addFileEntity("/" + UUID.randomUUID(), new ByteArrayInputStream(RANDOM_DATA), null); } final var inConfig = getBarjCargoInputStreamConfiguration(SHA_256, GzipCompressorInputStream::new); @@ -871,7 +871,7 @@ void testVerifyHashesShouldThrowExceptionWhenTheFileOrderIsChanged() throws IOEx //given final var outConfig = getBarjCargoOutputStreamConfiguration(SHA_256, GzipCompressorOutputStream::new); try (var out = new BarjCargoArchiverFileOutputStream(outConfig)) { - out.addFileEntity("/" + UUID.randomUUID().toString(), new ByteArrayInputStream(RANDOM_DATA), null); + out.addFileEntity("/" + UUID.randomUUID(), new ByteArrayInputStream(RANDOM_DATA), null); out.close(); final var written = out.getDataFilesWritten(); final var firstPath = written.get(0); @@ -896,7 +896,7 @@ void testVerifyHashesShouldThrowExceptionWhenTheStreamDoesNotEndAfterTheLastEntr //given final var outConfig = getBarjCargoOutputStreamConfiguration(SHA_256, GzipCompressorOutputStream::new); try (var out = new BarjCargoArchiverFileOutputStream(outConfig)) { - out.addFileEntity("/" + 
UUID.randomUUID().toString(), new ByteArrayInputStream(RANDOM_DATA), null); + out.addFileEntity("/" + UUID.randomUUID(), new ByteArrayInputStream(RANDOM_DATA), null); out.write(1); } @@ -914,7 +914,7 @@ void testConstructorShouldThrowExceptionWhenTheLastFileIsMissing() throws IOExce //given final var outConfig = getBarjCargoOutputStreamConfiguration(SHA_256, GzipCompressorOutputStream::new); try (var out = new BarjCargoArchiverFileOutputStream(outConfig)) { - out.addFileEntity("/" + UUID.randomUUID().toString(), new ByteArrayInputStream(RANDOM_DATA), null); + out.addFileEntity("/" + UUID.randomUUID(), new ByteArrayInputStream(RANDOM_DATA), null); out.close(); Assertions.assertTrue(out.getCurrentFilePath().toFile().delete()); } @@ -932,7 +932,7 @@ void testConstructorShouldThrowExceptionWhenTheSizeOfTheLastFileDoesNotMatch() t //given final var outConfig = getBarjCargoOutputStreamConfiguration(SHA_256, GzipCompressorOutputStream::new); try (var out = new BarjCargoArchiverFileOutputStream(outConfig)) { - out.addFileEntity("/" + UUID.randomUUID().toString(), new ByteArrayInputStream(RANDOM_DATA), null); + out.addFileEntity("/" + UUID.randomUUID(), new ByteArrayInputStream(RANDOM_DATA), null); out.close(); try (var hack = new FileOutputStream(out.getCurrentFilePath().toFile(), true)) { hack.write(1); diff --git a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseInputStreamTest.java b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseInputStreamTest.java index 815ff65..516deb8 100644 --- a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseInputStreamTest.java +++ b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/DoOnCloseInputStreamTest.java @@ -199,9 +199,8 @@ private static class TestDoOnCloseInputStream extends DoOnCloseInputStream { onCloseCalled = 0; } - @NonNull @Override - protected InputStream 
getInputStream() { + protected @NonNull InputStream getInputStream() { return stream; }