diff --git a/README.md b/README.md index 14e0ac1..379c357 100644 --- a/README.md +++ b/README.md @@ -46,10 +46,11 @@ File BaRJ comes with the following features - Inspect content of a backup increment - Duplicate handling (storing duplicates of the same file only once) - Deletes left-over files from the restore directory (if they had been in scope for the backup) +- Merge previous backup increments ### Planned features -- Merge previous backup increments +- Delete selected backup increments - UI for convenient configuration ## Modules diff --git a/file-barj-core/README.md b/file-barj-core/README.md index fc0dd63..4fc9d1f 100644 --- a/file-barj-core/README.md +++ b/file-barj-core/README.md @@ -63,6 +63,20 @@ final var backupController = new BackupController(configuration, false); backupController.execute(1); ``` +### Merging increments + +```java +final var mergeController = new MergeController( + Path.of("/tmp/backup"), + "prefix", + null, //optional key encryption key + 123L, //Backup start epoch seconds for the first file of the range (inclusive) + 234L //Backup start epoch seconds for the last file of the range (inclusive) +); + +mergeController.execute(false); +``` + ### Reading an archive ```java diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManager.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManager.java index 521ed85..c6c1815 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManager.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManager.java @@ -31,7 +31,7 @@ BackupIncrementManifest generateManifest( int nextVersion); /** - * Persists the provided manifest.to the hard drive in two copies (one encrypted that can be + * Persists the provided manifest to the hard drive in two copies (one encrypted that can be * moved to a safe location and one unencrypted in the history folder to allow incremental * backup jobs to function automatically without knowing the private keys). * @@ -39,6 +39,19 @@ BackupIncrementManifest generateManifest( */ void persist(@NonNull BackupIncrementManifest manifest); + /** + * Persists the provided manifest to the hard drive in two copies (one encrypted that can be + * moved to a safe location and one unencrypted in the history folder to allow incremental + * backup jobs to function automatically without knowing the private keys). + * The aforementioned files will be stored relative to the provided backup destination. + * + * @param manifest The manifest to persist + * @param backupDestination the backup destination + */ + void persist( + @NonNull BackupIncrementManifest manifest, + @NonNull Path backupDestination); + /** * Loads the manifests which belong to the provided backup. Only includes manifests starting * with the latest full backup before the provided time stamp. @@ -61,10 +74,10 @@ SortedMap load( * Loads all manifests which belong to the provided backup. Contains manifests for all * increments even if many full backups have been created. * - * @param destinationDirectory the directory where the backup files are stored - * @param fileNamePrefix the prefix of the backup files - * @param privateKey the RSA key we want to use to decrypt the manifests (optional). - * If null, the manifests will not be decrypted. 
+ * @param destinationDirectory the directory where the backup files are stored + * @param fileNamePrefix the prefix of the backup files + * @param privateKey the RSA key we want to use to decrypt the manifests (optional). + * If null, the manifests will not be decrypted. * @return the map of loaded manifests keyed by their timestamps */ SortedMap loadAll( diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImpl.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImpl.java index c56f514..3a63a71 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImpl.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImpl.java @@ -61,8 +61,21 @@ public BackupIncrementManifest generateManifest( @Override public void persist( @NonNull final BackupIncrementManifest manifest) { + final var backupDestination = manifest.getConfiguration().getDestinationDirectory(); + persist(manifest, backupDestination); + } + + @Override + public void persist( + @NonNull final BackupIncrementManifest manifest, + @NonNull final Path backupDestination) { validate(manifest, ValidationRules.Persisted.class); - final var backupDestination = manifest.getConfiguration().getDestinationDirectory().toFile(); + doPersist(manifest, backupDestination.toFile()); + } + + private void doPersist( + @NotNull final BackupIncrementManifest manifest, + @NotNull final File backupDestination) { final var backupHistoryDir = new File(backupDestination, ".history"); //noinspection ResultOfMethodCallIgnored backupHistoryDir.mkdirs(); @@ -237,6 +250,9 @@ private SortedMap loadAllManifests( log.warn("Failed to load manifest file: {}", path, e); } } + if (manifests.isEmpty()) { + throw new ArchivalException("No manifests found."); + } return manifests; } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverter.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverter.java index aa49f38..47e10b9 100644 --- a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverter.java +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverter.java @@ -2,8 +2,11 @@ import com.github.nagyesta.filebarj.core.model.BackupIncrementManifest; import com.github.nagyesta.filebarj.core.model.FileMetadata; +import com.github.nagyesta.filebarj.core.model.enums.BackupType; +import com.github.nagyesta.filebarj.core.util.LogUtil; import com.github.nagyesta.filebarj.io.stream.internal.ChunkingOutputStream; import lombok.NonNull; +import org.jetbrains.annotations.NotNull; import java.time.Instant; import java.util.Optional; @@ -23,7 +26,7 @@ public String convertToSummaryString(final @NonNull BackupIncrementManifest mani final var epochSeconds = manifest.getStartTimeUtcEpochSeconds(); final var totalSize = manifest.getFiles().values().stream() .mapToLong(FileMetadata::getOriginalSizeBytes).sum() / ChunkingOutputStream.MEBIBYTE; - return manifest.getBackupType().name() + " backup: " + manifest.getFileNamePrefix() + "\n" + return getFormattedType(manifest) + " backup: " + manifest.getFileNamePrefix() + "\n" + "\tStarted at : " + Instant.ofEpochSecond(epochSeconds) + " (Epoch seconds: " + epochSeconds + ")\n" + "\tContains " + manifest.getFiles().size() + " files (" + totalSize + " MiB)\n" + "\tVersions : " + 
manifest.getVersions() + "\n" @@ -32,4 +35,12 @@ public String convertToSummaryString(final @NonNull BackupIncrementManifest mani + "\tHash alg. : " + manifest.getConfiguration().getHashAlgorithm().name() + "\n" + "\tCompression: " + manifest.getConfiguration().getCompression().name(); } + + @NotNull + private String getFormattedType(@NotNull final BackupIncrementManifest manifest) { + if (manifest.getBackupType() == BackupType.INCREMENTAL) { + return manifest.getBackupType().name(); + } + return LogUtil.scary(manifest.getBackupType().name()); + } } diff --git a/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeController.java b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeController.java new file mode 100644 index 0000000..2890dec --- /dev/null +++ b/file-barj-core/src/main/java/com/github/nagyesta/filebarj/core/merge/MergeController.java @@ -0,0 +1,261 @@ +package com.github.nagyesta.filebarj.core.merge; + +import com.github.nagyesta.filebarj.core.backup.ArchivalException; +import com.github.nagyesta.filebarj.core.common.ManifestManager; +import com.github.nagyesta.filebarj.core.common.ManifestManagerImpl; +import com.github.nagyesta.filebarj.core.model.*; +import com.github.nagyesta.filebarj.core.model.enums.BackupType; +import com.github.nagyesta.filebarj.core.util.LogUtil; +import com.github.nagyesta.filebarj.io.stream.BarjCargoArchiveFileInputStreamSource; +import com.github.nagyesta.filebarj.io.stream.BarjCargoArchiverFileOutputStream; +import com.github.nagyesta.filebarj.io.stream.BarjCargoInputStreamConfiguration; +import com.github.nagyesta.filebarj.io.stream.BarjCargoOutputStreamConfiguration; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.PrivateKey; +import java.util.*; +import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Collectors; + +/** + * Controller implementation for the merge process. + */ +@Slf4j +public class MergeController { + private final ManifestManager manifestManager; + private final RestoreManifest mergedManifest; + private final SortedMap selectedManifests; + private final SortedMap manifestsToMerge; + private final PrivateKey kek; + private final Path backupDirectory; + private final ReentrantLock executionLock = new ReentrantLock(); + + /** + * Creates a new instance and initializes it for the merge. + * + * @param backupDirectory the directory where the backup files are located + * @param fileNamePrefix the prefix of the backup file names + * @param kek The key encryption key we want to use to decrypt and encrypt + * the files (optional). + * @param rangeStartEpochSeconds the start of the range to merge (inclusive) + * @param rangeEndEpochSeconds the end of the range to merge (inclusive) + */ + public MergeController( + @NonNull final Path backupDirectory, + @NonNull final String fileNamePrefix, + @Nullable final PrivateKey kek, + final long rangeStartEpochSeconds, + final long rangeEndEpochSeconds) { + if (rangeEndEpochSeconds <= rangeStartEpochSeconds) { + throw new IllegalArgumentException( + "Invalid range selected for merge! 
start=" + rangeEndEpochSeconds + ", end=" + rangeStartEpochSeconds); + } + this.kek = kek; + this.backupDirectory = backupDirectory; + manifestManager = new ManifestManagerImpl(); + log.info("Loading backup manifests for merge from: {}", backupDirectory); + final var manifests = manifestManager.loadAll(this.backupDirectory, fileNamePrefix, kek); + selectedManifests = filterToSelection(manifests, rangeStartEpochSeconds, rangeEndEpochSeconds); + log.info("Selected {} manifests", selectedManifests.size()); + manifestsToMerge = keepManifestsSinceLastFullBackupOfTheSelection(selectedManifests); + mergedManifest = manifestManager.mergeForRestore(manifestsToMerge); + final var filesOfLastManifest = mergedManifest.getFilesOfLastManifest(); + LogUtil.logStatistics(filesOfLastManifest.values(), + (type, count) -> log.info("Found {} {} items in merged backup", count, type)); + } + + /** + * Execute the merge. If deleteObsoleteFiles is true, the original manifests and backup files + * which are no longer needed will be deleted. + * + * @param deleteObsoleteFiles whether to delete obsolete files from the backup directory + * @return the merged manifest + */ + public BackupIncrementManifest execute(final boolean deleteObsoleteFiles) { + executionLock.lock(); + try { + final var result = mergeBackupContent(); + manifestManager.persist(result, backupDirectory); + if (deleteObsoleteFiles) { + log.info("Deleting obsolete files from backup directory: {}", backupDirectory); + selectedManifests.values().forEach(manifest -> { + final var fileNamePrefix = manifest.getFileNamePrefix(); + deleteManifestFromHistoryIfExists(fileNamePrefix); + deleteManifestAndArchiveFilesFromBackupDirectory(fileNamePrefix); + }); + } + return result; + } finally { + executionLock.unlock(); + } + } + + private void deleteManifestAndArchiveFilesFromBackupDirectory(@NotNull final String fileNamePrefix) { + final var patterns = Set.of( + "^" + fileNamePrefix + "\\.[0-9]{5}\\.cargo$", + "^" + fileNamePrefix + "\\.manifest\\.cargo$", + "^" + fileNamePrefix + "\\.index\\.cargo$" + ); + try (var list = Files.list(backupDirectory)) { + final var toDelete = new ArrayList(); + list.filter(path -> patterns.stream().anyMatch(pattern -> path.getFileName().toString().matches(pattern))) + .forEach(toDelete::add); + for (final var path : toDelete) { + log.info("Deleting obsolete file: {}", path); + Files.delete(path); + } + } catch (final IOException e) { + throw new RuntimeException(e); + } + } + + private void deleteManifestFromHistoryIfExists(@NotNull final String fileNamePrefix) { + final var fromHistory = backupDirectory.resolve(".history") + .resolve(fileNamePrefix + ".manifest.json.gz"); + try { + if (Files.exists(fromHistory)) { + log.info("Deleting obsolete file from history: {}", fromHistory); + Files.delete(fromHistory); + } + } catch (final IOException e) { + log.error("Could not delete manifest file from history folder: " + fromHistory, e); + } + } + + @NotNull + private BackupIncrementManifest mergeBackupContent() { + final var lastManifest = manifestsToMerge.get(manifestsToMerge.lastKey()); + final var firstManifest = manifestsToMerge.get(manifestsToMerge.firstKey()); + final var result = BackupIncrementManifest.builder() + .backupType(firstManifest.getBackupType()) + .startTimeUtcEpochSeconds(mergedManifest.getLastStartTimeUtcEpochSeconds()) + .configuration(mergedManifest.getConfiguration()) + .appVersion(new AppVersion()) + .fileNamePrefix(firstManifest.getConfiguration().getFileNamePrefix() + + "-" + 
firstManifest.getStartTimeUtcEpochSeconds() + + "-" + lastManifest.getStartTimeUtcEpochSeconds()) + .encryptionKeys(mergedManifest.getEncryptionKeys()) + .operatingSystem(lastManifest.getOperatingSystem()) + .versions(mergedManifest.getVersions()) + .files(mergedManifest.getFilesOfLastManifest()) + .archivedEntries(mergedManifest.getArchivedEntriesOfLastManifest()) + .build(); + final var outputStreamConfiguration = BarjCargoOutputStreamConfiguration.builder() + .compressionFunction(result.getConfiguration().getCompression()::decorateOutputStream) + .prefix(result.getFileNamePrefix()) + .folder(backupDirectory) + .hashAlgorithm(result.getConfiguration().getHashAlgorithm().getAlgorithmName()) + .indexEncryptionKey(Optional.ofNullable(kek) + .map(key -> result.dataIndexDecryptionKey(kek, result.getVersions().first())) + .orElse(null)) + .maxFileSizeMebibyte(result.getConfiguration().getChunkSizeMebibyte()) + .build(); + try (var output = new BarjCargoArchiverFileOutputStream(outputStreamConfiguration)) { + createDirectoriesForEachVersion(result, output); + final var manifests = manifestsToMerge.values().stream().distinct().toList(); + for (final var currentManifest : manifests) { + mergeContentEntriesFromManifest(currentManifest, result, output); + } + output.close(); + result.setIndexFileName(output.getIndexFileWritten().getFileName().toString()); + result.setDataFileNames(output.getDataFilesWritten().stream().map(Path::getFileName).map(Path::toString).toList()); + } catch (final IOException e) { + throw new ArchivalException("Failed to merge backup increments.", e); + } + return result; + } + + @NotNull + private SortedMap filterToSelection( + @NotNull final SortedMap manifests, + final long rangeStartEpochSeconds, + final long rangeEndEpochSeconds) { + if (!manifests.containsKey(rangeStartEpochSeconds)) { + throw new IllegalArgumentException("No manifest found with the provided start time: " + rangeStartEpochSeconds); + } + if (!manifests.containsKey(rangeEndEpochSeconds)) { + throw new IllegalArgumentException("No manifest found with the provided end time: " + rangeEndEpochSeconds); + } + return manifests.headMap(rangeEndEpochSeconds + 1).tailMap(rangeStartEpochSeconds); + } + + @NotNull + private SortedMap keepManifestsSinceLastFullBackupOfTheSelection( + @NotNull final SortedMap selected) { + final SortedMap result = new TreeMap<>(); + final var inReverseOrder = selected.values().stream() + .sorted(Comparator.comparingLong(BackupIncrementManifest::getStartTimeUtcEpochSeconds).reversed()) + .toList(); + for (final var manifest : inReverseOrder) { + manifest.getVersions().forEach(version -> result.put(version, manifest)); + if (manifest.getBackupType() == BackupType.FULL) { + if (manifest.getStartTimeUtcEpochSeconds() > selected.firstKey()) { + log.warn("Skipping merge for manifests before the latest full backup: {}", manifest.getStartTimeUtcEpochSeconds()); + } + break; + } + } + return result; + } + + private void createDirectoriesForEachVersion( + final BackupIncrementManifest result, final BarjCargoArchiverFileOutputStream output) { + result.getVersions().forEach(version -> { + try { + output.addDirectoryEntity("/" + version, null); + } catch (final IOException e) { + throw new ArchivalException("Failed to add directory entity for version " + version, e); + } + }); + } + + private void mergeContentEntriesFromManifest( + final BackupIncrementManifest currentManifest, + final BackupIncrementManifest result, + final BarjCargoArchiverFileOutputStream output) throws IOException { + final 
var relevantEntries = filterEntities(currentManifest, result); + final var inputStreamSource = new BarjCargoArchiveFileInputStreamSource(getStreamConfig(currentManifest, kek)); + try (var iterator = inputStreamSource.getIteratorForScope(relevantEntries)) { + while (iterator.hasNext()) { + final var currentEntry = iterator.next(); + if (relevantEntries.contains(currentEntry.getPath())) { + output.mergeEntity(currentEntry.getEntityIndex(), currentEntry.getRawContentAndMetadata()); + } else { + currentEntry.skipContent(); + currentEntry.skipMetadata(); + } + } + } + } + + @NonNull + private Set filterEntities( + final BackupIncrementManifest currentManifest, + final BackupIncrementManifest result) { + return result.getArchivedEntries().values().stream() + .map(ArchivedFileMetadata::getArchiveLocation) + .filter(archiveLocation -> currentManifest.getVersions().contains(archiveLocation.getBackupIncrement())) + .map(ArchiveEntryLocator::asEntryPath) + .collect(Collectors.toSet()); + } + + private BarjCargoInputStreamConfiguration getStreamConfig( + final BackupIncrementManifest currentManifest, final PrivateKey kek) { + final var decryptionKey = Optional.ofNullable(kek) + .map(key -> currentManifest.dataIndexDecryptionKey(key, currentManifest.getVersions().first())) + .orElse(null); + return BarjCargoInputStreamConfiguration.builder() + .compressionFunction(currentManifest.getConfiguration().getCompression()::decorateInputStream) + .prefix(currentManifest.getFileNamePrefix()) + .folder(backupDirectory) + .hashAlgorithm(currentManifest.getConfiguration().getHashAlgorithm().getAlgorithmName()) + .indexDecryptionKey(decryptionKey) + .build(); + } +} diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/TempFileAwareTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/TempFileAwareTest.java index b382433..da76371 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/TempFileAwareTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/TempFileAwareTest.java @@ -5,9 +5,11 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import java.io.File; import java.io.IOException; import java.nio.file.*; import java.nio.file.attribute.BasicFileAttributes; +import java.util.Set; import java.util.UUID; public abstract class TempFileAwareTest { @@ -55,4 +57,22 @@ public FileVisitResult visitFile(final Path file, final BasicFileAttributes attr }); } } + + /** + * Copies the selected backup files to the test directory. 
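+ * Each prefix is expanded to its three standard archive parts (prefix + ".00001.cargo", prefix + ".index.cargo", prefix + ".manifest.cargo") and each part is copied from the /backups test resources.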
+ * + * @param prefixes The prefixes of the backup files + * @param backupPath The path of the backup directory + * @throws IOException If the files cannot be copied + */ + @SuppressWarnings("DataFlowIssue") + protected void prepareBackupFiles(final Set prefixes, final Path backupPath) throws IOException { + for (final var prefix : prefixes) { + final var backupFiles = Set.of(prefix + ".00001.cargo", prefix + ".index.cargo", prefix + ".manifest.cargo"); + for (final var filename : backupFiles) { + final var path = new File(getClass().getResource("/backups/" + filename).getFile()).toPath().toAbsolutePath(); + Files.copy(path, backupPath.resolve(filename)); + } + } + } } diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplTest.java index 72d9b17..fd85066 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/common/ManifestManagerImplTest.java @@ -7,6 +7,7 @@ import com.github.nagyesta.filebarj.core.config.enums.CompressionAlgorithm; import com.github.nagyesta.filebarj.core.config.enums.DuplicateHandlingStrategy; import com.github.nagyesta.filebarj.core.config.enums.HashAlgorithm; +import com.github.nagyesta.filebarj.core.model.BackupIncrementManifest; import com.github.nagyesta.filebarj.core.model.BackupPath; import com.github.nagyesta.filebarj.core.model.ValidationRules; import com.github.nagyesta.filebarj.core.model.enums.BackupType; @@ -20,6 +21,8 @@ import java.time.Instant; import java.util.Set; +import static org.mockito.Mockito.mock; + class ManifestManagerImplTest extends TempFileAwareTest { public static final int A_SECOND = 1000; private final BackupJobConfiguration configuration = BackupJobConfiguration.builder() @@ -253,6 +256,31 @@ void testPersistShouldThrowExceptionWhenCalledWithNull() { //then + exception } + @SuppressWarnings("DataFlowIssue") + @Test + void testPersistShouldThrowExceptionWhenCalledWithNullManifest() { + //given + final var underTest = new ManifestManagerImpl(); + + //when + Assertions.assertThrows(IllegalArgumentException.class, () -> underTest.persist(null, Path.of("destination"))); + + //then + exception + } + + @SuppressWarnings("DataFlowIssue") + @Test + void testPersistShouldThrowExceptionWhenCalledWithNullDestination() { + //given + final var underTest = new ManifestManagerImpl(); + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> underTest.persist(mock(BackupIncrementManifest.class), null)); + + //then + exception + } + @SuppressWarnings("DataFlowIssue") @Test void testLoadShouldThrowExceptionWhenCalledWithNullDirectory() { diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionControllerTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionControllerTest.java index 4548771..8802009 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionControllerTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/pipeline/IncrementInspectionControllerTest.java @@ -134,7 +134,7 @@ void testInspectIncrementsShouldReturnSummariesWhenCalledWithStream() throws IOE //then final var actualContents = byteArrayOutputStream.toString(StandardCharsets.UTF_8); final var 
actualCount = actualContents.lines() - .filter(line -> line.startsWith("FULL")) + .filter(line -> line.startsWith("\033[0;31mFULL\033[0;0m")) .count(); Assertions.assertEquals(BACKUP_COUNT, actualCount); } diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverterTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverterTest.java index af4a491..99f1630 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverterTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/inspect/worker/ManifestToSummaryConverterTest.java @@ -89,7 +89,7 @@ void testConvertToSummaryStringShouldReturnExpectedValueWhenCalledWithValidManif //then Assertions.assertEquals(""" - FULL backup: prefix + \033[0;31mFULL\033[0;0m backup: prefix \tStarted at : 1970-01-01T00:00:01Z (Epoch seconds: 1) \tContains 1 files (2 MiB) \tVersions : [0] diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerIntegrationTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerIntegrationTest.java new file mode 100644 index 0000000..2c09c97 --- /dev/null +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerIntegrationTest.java @@ -0,0 +1,504 @@ +package com.github.nagyesta.filebarj.core.merge; + +import com.github.nagyesta.filebarj.core.TempFileAwareTest; +import com.github.nagyesta.filebarj.core.common.PermissionComparisonStrategy; +import com.github.nagyesta.filebarj.core.config.RestoreTarget; +import com.github.nagyesta.filebarj.core.config.RestoreTargets; +import com.github.nagyesta.filebarj.core.config.RestoreTask; +import com.github.nagyesta.filebarj.core.model.BackupPath; +import com.github.nagyesta.filebarj.core.restore.pipeline.RestoreController; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledOnOs; +import org.junit.jupiter.api.condition.OS; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.KeyStore; +import java.security.PrivateKey; +import java.util.Map; +import java.util.Set; +import java.util.stream.Stream; + +public class MergeControllerIntegrationTest extends TempFileAwareTest { + + private static final String DASH = "-"; + private static final PrivateKey KEK = getKek(); + private static final long B_FIRST_FULL = 1707595719L; + private static final long B_INCREMENT_1 = 1708783849L; + private static final long B_INCREMENT_2 = 1708783920L; + private static final long B_INCREMENT_3 = 1708783987L; + private static final long B_SECOND_FULL = 1708856935L; + private static final long E_FIRST_FULL = 1708883558L; + private static final long E_INCREMENT_1 = 1708883624L; + private static final long E_INCREMENT_2 = 1708883649L; + private static final long E_INCREMENT_3 = 1708883671L; + private static final long E_SECOND_FULL = 1708883739L; + private static final String UBUNTU_BACKUP = "ubuntu-backup"; + private static final String UB_FIRST_FULL = UBUNTU_BACKUP + DASH + B_FIRST_FULL; + private static final String UB_INCREMENT_1 = UBUNTU_BACKUP + DASH + B_INCREMENT_1; + private static final String UB_INCREMENT_2 = UBUNTU_BACKUP + DASH 
+ B_INCREMENT_2; + private static final String UB_INCREMENT_3 = UBUNTU_BACKUP + DASH + B_INCREMENT_3; + private static final String UB_SECOND_FULL = UBUNTU_BACKUP + DASH + B_SECOND_FULL; + private static final String UBUNTU_ENCRYPTED = "ubuntu-encrypted"; + private static final String UE_FIRST_FULL = UBUNTU_ENCRYPTED + DASH + E_FIRST_FULL; + private static final String UE_INCREMENT_1 = UBUNTU_ENCRYPTED + DASH + E_INCREMENT_1; + private static final String UE_INCREMENT_2 = UBUNTU_ENCRYPTED + DASH + E_INCREMENT_2; + private static final String UE_INCREMENT_3 = UBUNTU_ENCRYPTED + DASH + E_INCREMENT_3; + private static final String UE_SECOND_FULL = UBUNTU_ENCRYPTED + DASH + E_SECOND_FULL; + private static final Set BOTH_SETS_UBUNTU_BACKUP = Set.of( + UB_FIRST_FULL, + UB_INCREMENT_1, + UB_INCREMENT_2, + UB_INCREMENT_3, + UB_SECOND_FULL); + private static final Set BOTH_SETS_UBUNTU_ENCRYPTED = Set.of( + UE_FIRST_FULL, + UE_INCREMENT_1, + UE_INCREMENT_2, + UE_INCREMENT_3, + UE_SECOND_FULL); + private static final Set FIRST_SET_UBUNTU_BACKUP = Set.of( + UB_FIRST_FULL, + UB_INCREMENT_1, + UB_INCREMENT_2, + UB_INCREMENT_3); + private static final Set FIRST_SET_UBUNTU_ENCRYPTED = Set.of( + UE_FIRST_FULL, + UE_INCREMENT_1, + UE_INCREMENT_2, + UE_INCREMENT_3); + + public Stream validRangeProvider() { + return Stream.builder() + .add(Arguments.of(B_FIRST_FULL, B_INCREMENT_3)) + .add(Arguments.of(B_FIRST_FULL, B_INCREMENT_2)) + .add(Arguments.of(B_FIRST_FULL, B_INCREMENT_1)) + .add(Arguments.of(B_INCREMENT_1, B_INCREMENT_2)) + .add(Arguments.of(B_INCREMENT_1, B_INCREMENT_3)) + .add(Arguments.of(B_INCREMENT_2, B_INCREMENT_3)) + .build(); + } + + @Test + void testConstructorShouldThrowExceptionWhenCalledWithInvalidStartTime() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> new MergeController(backupPath, UBUNTU_BACKUP, null, 0L, B_INCREMENT_1)); + + //then + exception + } + + @Test + void testConstructorShouldThrowExceptionWhenCalledWithInvalidEndTime() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> new MergeController(backupPath, UBUNTU_BACKUP, null, B_FIRST_FULL, B_INCREMENT_1 + 1L)); + + //then + exception + } + + @ParameterizedTest + @MethodSource("validRangeProvider") + void testConstructorShouldNotThrowExceptionWhenCalledWithValidRange(final long start, final long end) throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); + + //when + final var actual = new MergeController(backupPath, UBUNTU_BACKUP, null, start, end); + + //then + Assertions.assertNotNull(actual); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldMergeSelectedRangeWhenCalledWithFullBackupAndFirstIncrementWithoutEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(BOTH_SETS_UBUNTU_BACKUP, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_FIRST_FULL, B_INCREMENT_1); + + //when + final var actual = 
underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UB_FIRST_FULL, UB_INCREMENT_1)); + verifyBackupFilesExist(Set.of(UB_FIRST_FULL + DASH + B_INCREMENT_1, UB_INCREMENT_2, UB_INCREMENT_3, UB_SECOND_FULL)); + restoreBackups(backupPath, actual.getFileNamePrefix(), null, Map.of( + "A/1.txt", "11111111", + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", "11111111", + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldMergeSelectedRangeWhenCalledWithFullBackupAndFirstIncrementWithEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(BOTH_SETS_UBUNTU_ENCRYPTED, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_FIRST_FULL, E_INCREMENT_1); + + //when + final var actual = underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UE_FIRST_FULL, UE_INCREMENT_1)); + verifyBackupFilesExist(Set.of(UE_FIRST_FULL + DASH + E_INCREMENT_1, UE_INCREMENT_2, UE_INCREMENT_3, UE_SECOND_FULL)); + restoreBackups(backupPath, actual.getFileNamePrefix(), KEK, Map.of( + "A/1.txt", "11111111", + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", "11111111", + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldMergeSelectedRangeWhenCalledWithAllIncrementsWithoutEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_INCREMENT_1, B_INCREMENT_3); + + //when + final var actual = underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UB_INCREMENT_1, UB_INCREMENT_2, UB_INCREMENT_3)); + verifyBackupFilesExist(Set.of(UB_FIRST_FULL, UB_INCREMENT_1 + DASH + B_INCREMENT_3)); + restoreBackups(backupPath, actual.getConfiguration().getFileNamePrefix(), null, Map.of( + "A/1.txt", DASH, + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", DASH, + "A/2.txt", DASH, + "A/3.txt", DASH, + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldMergeSelectedRangeWhenCalledWithAllIncrementsWithEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(FIRST_SET_UBUNTU_ENCRYPTED, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_INCREMENT_1, E_INCREMENT_3); + + //when + final var actual = underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UE_INCREMENT_1, UE_INCREMENT_2, UE_INCREMENT_3)); + verifyBackupFilesExist(Set.of(UE_FIRST_FULL, UE_INCREMENT_1 + DASH + E_INCREMENT_3)); + restoreBackups(backupPath, actual.getConfiguration().getFileNamePrefix(), KEK, Map.of( + "A/1.txt", DASH, + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", DASH, + "A/2.txt", DASH, + "A/3.txt", DASH, + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldMergeSelectedRangeWhenCalledWithFirstTwoIncrementsWithoutEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + 
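+ // stage the first full backup and all three of its increments so the controller can discover the whole chain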
prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_INCREMENT_1, B_INCREMENT_2); + + //when + final var actual = underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UB_INCREMENT_1, UB_INCREMENT_2)); + verifyBackupFilesExist(Set.of(UB_FIRST_FULL, UB_INCREMENT_1 + DASH + B_INCREMENT_2, UB_INCREMENT_3)); + restoreBackups(backupPath, actual.getConfiguration().getFileNamePrefix(), null, Map.of( + "A/1.txt", DASH, + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", DASH, + "A/2.txt", DASH, + "A/3.txt", DASH, + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldMergeSelectedRangeWhenCalledWithFirstTwoIncrementsWithEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(FIRST_SET_UBUNTU_ENCRYPTED, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_INCREMENT_1, E_INCREMENT_2); + + //when + final var actual = underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UE_INCREMENT_1, UE_INCREMENT_2)); + verifyBackupFilesExist(Set.of(UE_FIRST_FULL, UE_INCREMENT_1 + DASH + E_INCREMENT_2, UE_INCREMENT_3)); + restoreBackups(backupPath, actual.getConfiguration().getFileNamePrefix(), KEK, Map.of( + "A/1.txt", DASH, + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", DASH, + "A/2.txt", DASH, + "A/3.txt", DASH, + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldMergeSelectedRangeWhenCalledWithFullBackupAndTwoIncrementsWithoutEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + final var prefixes = Set.of( + UB_FIRST_FULL, + UB_INCREMENT_1, + UB_INCREMENT_2); + prepareBackupFiles(prefixes, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_FIRST_FULL, B_INCREMENT_2); + + //when + final var actual = underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UB_FIRST_FULL, UB_INCREMENT_1, UB_INCREMENT_2)); + verifyBackupFilesExist(Set.of(UB_FIRST_FULL + DASH + B_INCREMENT_2)); + restoreBackups(backupPath, actual.getConfiguration().getFileNamePrefix(), null, Map.of( + "A/1.txt", "11111111", + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", "11111111", + "A/2.txt", "2222\n", + "A/3.txt", "333333333\n", + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldMergeSelectedRangeWhenCalledWithFullBackupAndTwoIncrementsWithEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + final var prefixes = Set.of( + UE_FIRST_FULL, + UE_INCREMENT_1, + UE_INCREMENT_2); + prepareBackupFiles(prefixes, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_FIRST_FULL, E_INCREMENT_2); + + //when + final var actual = underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UE_FIRST_FULL, UE_INCREMENT_1, UE_INCREMENT_2)); + verifyBackupFilesExist(Set.of(UE_FIRST_FULL + DASH + E_INCREMENT_2)); + restoreBackups(backupPath, actual.getConfiguration().getFileNamePrefix(), KEK, Map.of( + "A/1.txt", 
"11111111", + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", "11111111", + "A/2.txt", "2222\n", + "A/3.txt", "333333333\n", + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldDeleteIncrementsFromSelectedRangeBeforeFullBackupWhenCalledWithAllIncrementsAndFullBackupWithoutEncryption() + throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(BOTH_SETS_UBUNTU_BACKUP, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_INCREMENT_1, B_SECOND_FULL); + + //when + final var actual = underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UB_INCREMENT_1, UB_INCREMENT_2, UB_INCREMENT_3, UB_SECOND_FULL)); + verifyBackupFilesExist(Set.of(UB_FIRST_FULL, UB_SECOND_FULL + DASH + B_SECOND_FULL)); + restoreBackups(backupPath, actual.getConfiguration().getFileNamePrefix(), null, Map.of( + "A/1.txt", DASH, + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", DASH, + "A/2.txt", DASH, + "A/3.txt", DASH, + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void testExecuteShouldDeleteIncrementsFromSelectedRangeBeforeFullBackupWhenCalledWithAllIncrementsAndFullBackupWithEncryption() + throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(BOTH_SETS_UBUNTU_ENCRYPTED, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_INCREMENT_1, E_SECOND_FULL); + + //when + final var actual = underTest.execute(true); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesAreDeleted(Set.of(UE_INCREMENT_1, UE_INCREMENT_2, UE_INCREMENT_3, UE_SECOND_FULL)); + verifyBackupFilesExist(Set.of(UE_FIRST_FULL, UE_SECOND_FULL + DASH + E_SECOND_FULL)); + restoreBackups(backupPath, actual.getConfiguration().getFileNamePrefix(), KEK, Map.of( + "A/1.txt", DASH, + "B/2.txt", "22222222-22222222" + ), Map.of( + "A/1.txt", DASH, + "A/2.txt", DASH, + "A/3.txt", DASH, + "B/2.txt", "22222222" + )); + } + + @Test + @DisabledOnOs(value = OS.WINDOWS, disabledReason = "Skipped because increment 2 content becomes corrupt during CRLF conversion") + void testExecuteShouldNotDeleteFilesWhenCalledWithFalseFlagWithoutEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(FIRST_SET_UBUNTU_BACKUP, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_BACKUP, null, B_INCREMENT_1, B_INCREMENT_2); + + //when + final var actual = underTest.execute(false); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesExist(Set.of(UB_FIRST_FULL, UB_INCREMENT_1, UB_INCREMENT_2, UB_INCREMENT_3, + UB_INCREMENT_1 + DASH + B_INCREMENT_2)); + } + + @Test + void testExecuteShouldNotDeleteFilesWhenCalledWithFalseFlagWithEncryption() throws IOException { + //given + final var backupPath = testDataRoot.resolve("backup"); + Files.createDirectories(backupPath); + prepareBackupFiles(FIRST_SET_UBUNTU_ENCRYPTED, backupPath); + final var underTest = new MergeController(backupPath, UBUNTU_ENCRYPTED, KEK, E_INCREMENT_1, E_INCREMENT_2); + + //when + final var actual = underTest.execute(false); + + //then + Assertions.assertNotNull(actual); + verifyBackupFilesExist(Set.of(UE_FIRST_FULL, UE_INCREMENT_1, UE_INCREMENT_2, UE_INCREMENT_3, + UE_INCREMENT_1 + 
DASH + E_INCREMENT_2)); + } + + private static PrivateKey getKek() { + try { + final var keyStore = KeyStore.getInstance("PKCS12"); + keyStore.load(MergeControllerIntegrationTest.class.getResourceAsStream("/backups/test.p12.key"), "123".toCharArray()); + return (PrivateKey) keyStore.getKey("default", "123".toCharArray()); + } catch (final Exception e) { + Assertions.fail("Could not load kek", e); + return null; + } + } + + private void restoreBackups( + final Path backupPath, + final String fileNamePrefix, + final PrivateKey kek, + final Map rContents, + final Map uContents) throws IOException { + final var restorePath = testDataRoot.resolve("restore"); + Files.createDirectories(restorePath); + final var restoredR = restorePath.resolve("R"); + final var restoredU = restorePath.resolve("U"); + final var r = new RestoreTarget(BackupPath.ofPathAsIs("/tmp/R/barj-test"), restoredR); + final var u = new RestoreTarget(BackupPath.ofPathAsIs("/tmp/U/barj-test"), restoredU); + final var task = RestoreTask.builder() + .restoreTargets(new RestoreTargets(Set.of(r, u))) + .dryRun(false) + .threads(1) + .permissionComparisonStrategy(PermissionComparisonStrategy.RELAXED) + .build(); + new RestoreController(backupPath, fileNamePrefix, kek) + .execute(task); + verifyContents(restoredR, rContents); + verifyContents(restoredU, uContents); + } + + private static void verifyContents(final Path root, final Map expected) throws IOException { + for (final var entry : expected.entrySet()) { + if (entry.getValue().equals(DASH)) { + Assertions.assertFalse(Files.exists(root.resolve(entry.getKey())), + "File " + root.resolve(entry.getKey()) + " should not exist."); + } else { + Assertions.assertEquals(entry.getValue(), Files.readString(root.resolve(entry.getKey()))); + } + } + } + + private void verifyBackupFilesAreDeleted(final Set prefixes) { + for (final var prefix : prefixes) { + for (final var fileName : Stream.of(".00001.cargo", ".manifest.cargo", ".index.cargo").map(prefix::concat).toList()) { + final var path = testDataRoot.resolve("backup").resolve(fileName); + Assertions.assertFalse(Files.exists(path), "File " + path + " should be deleted"); + } + } + } + + private void verifyBackupFilesExist(final Set prefixes) { + for (final var prefix : prefixes) { + for (final var fileName : Stream.of(".00001.cargo", ".manifest.cargo", ".index.cargo").map(prefix::concat).toList()) { + final var path = testDataRoot.resolve("backup").resolve(fileName); + Assertions.assertTrue(Files.exists(path), "File " + path + " should exist."); + } + } + } +} diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerTest.java new file mode 100644 index 0000000..251e592 --- /dev/null +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/merge/MergeControllerTest.java @@ -0,0 +1,54 @@ +package com.github.nagyesta.filebarj.core.merge; + +import com.github.nagyesta.filebarj.core.TempFileAwareTest; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class MergeControllerTest extends TempFileAwareTest { + + @SuppressWarnings("DataFlowIssue") + @Test + void testConstructorShouldThrowExceptionWhenCalledWithNullDirectory() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> new MergeController(null, "prefix", null, 0L, 1L)); + + //then + exception + } + + @SuppressWarnings("DataFlowIssue") + @Test + void 
testConstructorShouldThrowExceptionWhenCalledWithNullPrefix() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> new MergeController(testDataRoot, null, null, 0L, 1L)); + + //then + exception + } + + @Test + void testConstructorShouldThrowExceptionWhenCalledWithEndTimeEarlierThanStartTime() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> new MergeController(testDataRoot, "prefix", null, 0L, -1L)); + + //then + exception + } + + @Test + void testConstructorShouldThrowExceptionWhenCalledWithStartTimeEqualToEndTime() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> new MergeController(testDataRoot, "prefix", null, 0L, 0L)); + + //then + exception + } +} diff --git a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/CrossPlatformRestoreIntegrationTest.java b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/CrossPlatformRestoreIntegrationTest.java index 5b07a9a..9bac3bd 100644 --- a/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/CrossPlatformRestoreIntegrationTest.java +++ b/file-barj-core/src/test/java/com/github/nagyesta/filebarj/core/restore/pipeline/CrossPlatformRestoreIntegrationTest.java @@ -9,7 +9,6 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -24,11 +23,7 @@ void testRestoreShouldRestoreContentWhenRestoringABackupMadeOnWindows() throws I final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); Files.createDirectories(restorePath); - final var backupFiles = Set.of( - "windows-backup-1707544070.00001.cargo", - "windows-backup-1707544070.index.cargo", - "windows-backup-1707544070.manifest.cargo"); - prepareBackupFiles(backupFiles, backupPath); + prepareBackupFiles(Set.of("windows-backup-1707544070"), backupPath); final var restoredR = restorePath.resolve("R"); final var restoredU = restorePath.resolve("U"); final var r = new RestoreTarget(BackupPath.ofPathAsIs("R:/barj-test"), restoredR); @@ -55,11 +50,7 @@ void testRestoreShouldRestoreContentWhenRestoringABackupMadeOnUnix() throws IOEx final var backupPath = testDataRoot.resolve("backup"); Files.createDirectories(backupPath); Files.createDirectories(restorePath); - final var backupFiles = Set.of( - "ubuntu-backup-1707595719.00001.cargo", - "ubuntu-backup-1707595719.index.cargo", - "ubuntu-backup-1707595719.manifest.cargo"); - prepareBackupFiles(backupFiles, backupPath); + prepareBackupFiles(Set.of("ubuntu-backup-1707595719"), backupPath); final var restoredR = restorePath.resolve("R"); final var restoredU = restorePath.resolve("U"); final var r = new RestoreTarget(BackupPath.ofPathAsIs("/tmp/R/barj-test"), restoredR); @@ -79,14 +70,6 @@ void testRestoreShouldRestoreContentWhenRestoringABackupMadeOnUnix() throws IOEx verifyContent(restoredR, restoredU); } - @SuppressWarnings("DataFlowIssue") - private void prepareBackupFiles(final Set backupFiles, final Path backupPath) throws IOException { - for (final var filename : backupFiles) { - final var path = new File(getClass().getResource("/backups/" + filename).getFile()).toPath().toAbsolutePath(); - Files.copy(path, backupPath.resolve(filename)); - } - } - private void verifyContent(final Path restoredR, final Path restoredU) throws IOException { Assertions.assertEquals("11111111",
Files.readString(restoredR.resolve("A/1.txt"))); Assertions.assertEquals("22222222-22222222", Files.readString(restoredR.resolve("B/2.txt"))); diff --git a/file-barj-core/src/test/resources/backups/test.p12.key b/file-barj-core/src/test/resources/backups/test.p12.key new file mode 100644 index 0000000..15c3bb0 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/test.p12.key differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.00001.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.00001.cargo new file mode 100644 index 0000000..e69de29 diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.index.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.index.cargo new file mode 100644 index 0000000..2def114 --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.index.cargo @@ -0,0 +1,19 @@ +# File BaRJ Cargo Archive Index +00000001.path:/1 +00000001.type:DIRECTORY +00000001.encrypt:false +00000001.metadata.rel.start.idx:0 +00000001.metadata.rel.start.file:ubuntu-backup-1708783849.00001.cargo +00000001.metadata.rel.end.idx:0 +00000001.metadata.rel.end.file:ubuntu-backup-1708783849.00001.cargo +00000001.metadata.abs.start.idx:0 +00000001.metadata.abs.end.idx:0 +00000001.metadata.orig.size:0 +00000001.metadata.orig.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000001.metadata.arch.size:0 +00000001.metadata.arch.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +last.cnunk.index:1 +last.cnunk.size:0 +max.cnunk.size:536870912 +last.entity.index:1 +total.size:0 diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.manifest.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.manifest.cargo new file mode 100644 index 0000000..35ddae0 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.manifest.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.manifest.cargo.json b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.manifest.cargo.json new file mode 100644 index 0000000..1ad5a08 --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783849.manifest.cargo.json @@ -0,0 +1,262 @@ +{ + "backup_versions" : [ 1 ], + "encryption_keys" : null, + "app_version" : "0.12.1", + "start_time_utc_epoch_seconds" : 1708783849, + "file_name_prefix" : "ubuntu-backup-1708783849", + "backup_type" : "INCREMENTAL", + "operating_system" : "Linux", + "job_configuration" : { + "backup_type" : "INCREMENTAL", + "hash_algorithm" : "SHA256", + "compression_algorithm" : "NONE", + "encryption_key" : null, + "duplicate_strategy" : "KEEP_EACH", + "chunk_size_mebibyte" : 512, + "file_name_prefix" : "ubuntu-backup", + "destination_directory" : "file:///home/esta/Backup/", + "sources" : [ { + "path" : "file:///tmp/R", + "include_patterns" : [ "**" ], + "exclude_patterns" : [ ] + }, { + "path" : "file:///tmp/U", + "include_patterns" : [ "**" ], + "exclude_patterns" : [ ] + } ] + }, + "files" : { + "33459948-90d3-498b-8d1c-7facf06be36e" : { + "id" : "33459948-90d3-498b-8d1c-7facf06be36e", + "file_system_key" : "(dev=fd01,ino=13806029)", + "path" : "file:///tmp/U/barj-test/B/2.txt", + "original_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "original_size" : 8, + "last_modified_utc_epoch_seconds" : 1707542996, + 
"last_accessed_utc_epoch_seconds" : 1707570208, + "created_utc_epoch_seconds" : 1707542996, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NO_CHANGE", + "archive_metadata_id" : "d65f4917-748d-4c6a-ba09-25f5224ead18" + }, + "85a2fbfa-0dc8-4b74-aac7-86b8b2e6203e" : { + "id" : "85a2fbfa-0dc8-4b74-aac7-86b8b2e6203e", + "file_system_key" : "(dev=fd01,ino=13806021)", + "path" : "file:///tmp/R/barj-test/B", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707543034, + "last_accessed_utc_epoch_seconds" : 1708783849, + "created_utc_epoch_seconds" : 1707543034, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "dbf9d5d0-c252-4d51-9d36-8a20fef27b26" : { + "id" : "dbf9d5d0-c252-4d51-9d36-8a20fef27b26", + "file_system_key" : "(dev=fd01,ino=13806025)", + "path" : "file:///tmp/U/barj-test/A", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1708783781, + "last_accessed_utc_epoch_seconds" : 1708783849, + "created_utc_epoch_seconds" : 1708783781, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "METADATA_CHANGED" + }, + "46585e05-357c-4a8b-a020-933f6e08a155" : { + "id" : "46585e05-357c-4a8b-a020-933f6e08a155", + "file_system_key" : "(dev=fd01,ino=13762900)", + "path" : "file:///tmp/U/barj-test/A/1.txt", + "original_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "original_size" : 8, + "last_modified_utc_epoch_seconds" : 1708783781, + "last_accessed_utc_epoch_seconds" : 1708783781, + "created_utc_epoch_seconds" : 1708783781, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "METADATA_CHANGED", + "archive_metadata_id" : "273e5466-c7c8-43a0-9a82-e3b059ccc642" + }, + "f9d461fb-be47-4882-82e1-1351a9249796" : { + "id" : "f9d461fb-be47-4882-82e1-1351a9249796", + "file_system_key" : "(dev=fd01,ino=13806024)", + "path" : "file:///tmp/U/barj-test/B", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707542974, + "last_accessed_utc_epoch_seconds" : 1708783849, + "created_utc_epoch_seconds" : 1707542974, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "0b8fd306-eccc-44cf-950c-bd8aa0b86858" : { + "id" : "0b8fd306-eccc-44cf-950c-bd8aa0b86858", + "file_system_key" : "(dev=fd01,ino=13806023)", + "path" : "file:///tmp/U", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595650, + "last_accessed_utc_epoch_seconds" : 1708783722, + "created_utc_epoch_seconds" : 1707595650, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "b4f5cf7e-8cb8-47d5-95f4-8ff0048aa446" : { + "id" : "b4f5cf7e-8cb8-47d5-95f4-8ff0048aa446", + "file_system_key" : "(dev=fd01,ino=13806020)", + "path" : "file:///tmp/R", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595671, + "last_accessed_utc_epoch_seconds" : 1708783768, + "created_utc_epoch_seconds" : 1707595671, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "ed462ea3-d21d-4282-b2e0-ca64699ee793" : { + "id" : 
"ed462ea3-d21d-4282-b2e0-ca64699ee793", + "file_system_key" : "(dev=fd01,ino=13806028)", + "path" : "file:///tmp/R/barj-test/A/1.txt", + "original_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "original_size" : 8, + "last_modified_utc_epoch_seconds" : 1707543051, + "last_accessed_utc_epoch_seconds" : 1708783781, + "created_utc_epoch_seconds" : 1707543051, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NO_CHANGE", + "archive_metadata_id" : "7b71430c-d889-4959-b23c-5d52da715f26" + }, + "196ae42c-4d4a-4ca7-8832-8e1370ee1d77" : { + "id" : "196ae42c-4d4a-4ca7-8832-8e1370ee1d77", + "file_system_key" : "(dev=fd01,ino=13806027)", + "path" : "file:///tmp/R/barj-test/B/2.txt", + "original_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "original_size" : 17, + "last_modified_utc_epoch_seconds" : 1707543066, + "last_accessed_utc_epoch_seconds" : 1707570208, + "created_utc_epoch_seconds" : 1707543066, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NO_CHANGE", + "archive_metadata_id" : "6722acf7-a446-4a87-a4b9-0170d4ef51c1" + }, + "1e09be30-abfe-482a-898d-78f585ece6ac" : { + "id" : "1e09be30-abfe-482a-898d-78f585ece6ac", + "file_system_key" : "(dev=fd01,ino=13806005)", + "path" : "file:///tmp/R/barj-test", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595671, + "last_accessed_utc_epoch_seconds" : 1708783771, + "created_utc_epoch_seconds" : 1707595671, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "45ee9bb7-ca0b-45bb-b790-7419e5dfa789" : { + "id" : "45ee9bb7-ca0b-45bb-b790-7419e5dfa789", + "file_system_key" : "(dev=fd01,ino=13806004)", + "path" : "file:///tmp/U/barj-test", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595650, + "last_accessed_utc_epoch_seconds" : 1708783722, + "created_utc_epoch_seconds" : 1707595650, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "416fd0c0-a381-4731-9ef9-3c0355a96e16" : { + "id" : "416fd0c0-a381-4731-9ef9-3c0355a96e16", + "file_system_key" : "(dev=fd01,ino=13806022)", + "path" : "file:///tmp/R/barj-test/A", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707543045, + "last_accessed_utc_epoch_seconds" : 1708783774, + "created_utc_epoch_seconds" : 1707543045, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + } + }, + "archive_entries" : { + "6722acf7-a446-4a87-a4b9-0170d4ef51c1" : { + "id" : "6722acf7-a446-4a87-a4b9-0170d4ef51c1", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "b3f8cff6-a26d-45ee-8be4-ef97c8b43810" + }, + "archived_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "original_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "files" : [ "196ae42c-4d4a-4ca7-8832-8e1370ee1d77" ] + }, + "7b71430c-d889-4959-b23c-5d52da715f26" : { + "id" : "7b71430c-d889-4959-b23c-5d52da715f26", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "c29205b4-d30a-4bb7-a1fe-7a8a358344e7" + }, + "archived_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + 
"original_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "files" : [ "ed462ea3-d21d-4282-b2e0-ca64699ee793" ] + }, + "d65f4917-748d-4c6a-ba09-25f5224ead18" : { + "id" : "d65f4917-748d-4c6a-ba09-25f5224ead18", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "874064d4-7bd0-432e-ac4c-da7ef50050c2" + }, + "archived_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "original_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "files" : [ "33459948-90d3-498b-8d1c-7facf06be36e" ] + }, + "273e5466-c7c8-43a0-9a82-e3b059ccc642" : { + "id" : "273e5466-c7c8-43a0-9a82-e3b059ccc642", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "c29205b4-d30a-4bb7-a1fe-7a8a358344e7" + }, + "archived_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "original_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "files" : [ "46585e05-357c-4a8b-a020-933f6e08a155" ] + } + }, + "index_file_name" : "ubuntu-backup-1708783849.index.cargo", + "data_file_names" : [ "ubuntu-backup-1708783849.00001.cargo" ] +} \ No newline at end of file diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.00001.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.00001.cargo new file mode 100644 index 0000000..a08da47 --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.00001.cargo @@ -0,0 +1,2 @@ +2222 +333333333 diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.index.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.index.cargo new file mode 100644 index 0000000..ef8c49f --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.index.cargo @@ -0,0 +1,65 @@ +# File BaRJ Cargo Archive Index +00000001.path:/2 +00000001.type:DIRECTORY +00000001.encrypt:false +00000001.metadata.rel.start.idx:0 +00000001.metadata.rel.start.file:ubuntu-backup-1708783920.00001.cargo +00000001.metadata.rel.end.idx:0 +00000001.metadata.rel.end.file:ubuntu-backup-1708783920.00001.cargo +00000001.metadata.abs.start.idx:0 +00000001.metadata.abs.end.idx:0 +00000001.metadata.orig.size:0 +00000001.metadata.orig.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000001.metadata.arch.size:0 +00000001.metadata.arch.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000002.path:/2/b68dd145-f79e-41d4-846a-9d55bc11b0da +00000002.type:REGULAR_FILE +00000002.encrypt:false +00000002.content.rel.start.idx:0 +00000002.content.rel.start.file:ubuntu-backup-1708783920.00001.cargo +00000002.content.rel.end.idx:5 +00000002.content.rel.end.file:ubuntu-backup-1708783920.00001.cargo +00000002.content.abs.start.idx:0 +00000002.content.abs.end.idx:5 +00000002.content.orig.size:5 +00000002.content.orig.hash:f4dd4112a2540430e5a8a7158bcf120f8f2489bffa4d8f6aefa8dcd584047011 +00000002.content.arch.size:5 +00000002.content.arch.hash:f4dd4112a2540430e5a8a7158bcf120f8f2489bffa4d8f6aefa8dcd584047011 +00000002.metadata.rel.start.idx:5 +00000002.metadata.rel.start.file:ubuntu-backup-1708783920.00001.cargo +00000002.metadata.rel.end.idx:5 +00000002.metadata.rel.end.file:ubuntu-backup-1708783920.00001.cargo +00000002.metadata.abs.start.idx:5 +00000002.metadata.abs.end.idx:5 +00000002.metadata.orig.size:0 +00000002.metadata.orig.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000002.metadata.arch.size:0 
+00000002.metadata.arch.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000003.path:/2/d47227c4-a0c3-4136-975b-8d06e1908b86 +00000003.type:REGULAR_FILE +00000003.encrypt:false +00000003.content.rel.start.idx:5 +00000003.content.rel.start.file:ubuntu-backup-1708783920.00001.cargo +00000003.content.rel.end.idx:15 +00000003.content.rel.end.file:ubuntu-backup-1708783920.00001.cargo +00000003.content.abs.start.idx:5 +00000003.content.abs.end.idx:15 +00000003.content.orig.size:10 +00000003.content.orig.hash:00952f78a92441fbf212a3f3095c816c861bfa185dc6c387f12e1e1b7def37ae +00000003.content.arch.size:10 +00000003.content.arch.hash:00952f78a92441fbf212a3f3095c816c861bfa185dc6c387f12e1e1b7def37ae +00000003.metadata.rel.start.idx:15 +00000003.metadata.rel.start.file:ubuntu-backup-1708783920.00001.cargo +00000003.metadata.rel.end.idx:15 +00000003.metadata.rel.end.file:ubuntu-backup-1708783920.00001.cargo +00000003.metadata.abs.start.idx:15 +00000003.metadata.abs.end.idx:15 +00000003.metadata.orig.size:0 +00000003.metadata.orig.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000003.metadata.arch.size:0 +00000003.metadata.arch.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +last.cnunk.index:1 +last.cnunk.size:15 +max.cnunk.size:536870912 +last.entity.index:3 +total.size:15 diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.manifest.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.manifest.cargo new file mode 100644 index 0000000..b114906 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.manifest.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.manifest.cargo.json b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.manifest.cargo.json new file mode 100644 index 0000000..f3e84e4 --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783920.manifest.cargo.json @@ -0,0 +1,316 @@ +{ + "backup_versions" : [ 2 ], + "encryption_keys" : null, + "app_version" : "0.12.1", + "start_time_utc_epoch_seconds" : 1708783920, + "file_name_prefix" : "ubuntu-backup-1708783920", + "backup_type" : "INCREMENTAL", + "operating_system" : "Linux", + "job_configuration" : { + "backup_type" : "INCREMENTAL", + "hash_algorithm" : "SHA256", + "compression_algorithm" : "NONE", + "encryption_key" : null, + "duplicate_strategy" : "KEEP_EACH", + "chunk_size_mebibyte" : 512, + "file_name_prefix" : "ubuntu-backup", + "destination_directory" : "file:///home/esta/Backup/", + "sources" : [ { + "path" : "file:///tmp/R", + "include_patterns" : [ "**" ], + "exclude_patterns" : [ ] + }, { + "path" : "file:///tmp/U", + "include_patterns" : [ "**" ], + "exclude_patterns" : [ ] + } ] + }, + "files" : { + "31cde011-6a1f-4765-ba9e-7c8b54c09ff1" : { + "id" : "31cde011-6a1f-4765-ba9e-7c8b54c09ff1", + "file_system_key" : "(dev=fd01,ino=13806024)", + "path" : "file:///tmp/U/barj-test/B", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707542974, + "last_accessed_utc_epoch_seconds" : 1708783849, + "created_utc_epoch_seconds" : 1707542974, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "d7f57bb2-f8a4-450e-9e53-0b0cad868c52" : { + "id" : "d7f57bb2-f8a4-450e-9e53-0b0cad868c52", + "file_system_key" : "(dev=fd01,ino=13806023)", + "path" : "file:///tmp/U", + "original_size" : 4096, + 
"last_modified_utc_epoch_seconds" : 1707595650, + "last_accessed_utc_epoch_seconds" : 1708783722, + "created_utc_epoch_seconds" : 1707595650, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "89eaecde-c090-48a8-a5a1-10003f627d68" : { + "id" : "89eaecde-c090-48a8-a5a1-10003f627d68", + "file_system_key" : "(dev=fd01,ino=13806027)", + "path" : "file:///tmp/R/barj-test/B/2.txt", + "original_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "original_size" : 17, + "last_modified_utc_epoch_seconds" : 1707543066, + "last_accessed_utc_epoch_seconds" : 1708783850, + "created_utc_epoch_seconds" : 1707543066, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NO_CHANGE", + "archive_metadata_id" : "22df803b-dbbd-44d6-bd84-965153c10b5a" + }, + "deb675c0-d4a9-4ff4-b5d7-874a8ce94533" : { + "id" : "deb675c0-d4a9-4ff4-b5d7-874a8ce94533", + "file_system_key" : "(dev=fd01,ino=13806021)", + "path" : "file:///tmp/R/barj-test/B", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707543034, + "last_accessed_utc_epoch_seconds" : 1708783849, + "created_utc_epoch_seconds" : 1707543034, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "b6dd4eb5-3496-4a98-b386-b9b5817af9a3" : { + "id" : "b6dd4eb5-3496-4a98-b386-b9b5817af9a3", + "file_system_key" : "(dev=fd01,ino=13806028)", + "path" : "file:///tmp/R/barj-test/A/1.txt", + "original_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "original_size" : 8, + "last_modified_utc_epoch_seconds" : 1707543051, + "last_accessed_utc_epoch_seconds" : 1708783781, + "created_utc_epoch_seconds" : 1707543051, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NO_CHANGE", + "archive_metadata_id" : "b2dc184f-73db-4783-8c2a-d63f037429b3" + }, + "bd518d61-ca29-4e78-965d-f4b96691f9ba" : { + "id" : "bd518d61-ca29-4e78-965d-f4b96691f9ba", + "file_system_key" : "(dev=fd01,ino=13806022)", + "path" : "file:///tmp/R/barj-test/A", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707543045, + "last_accessed_utc_epoch_seconds" : 1708783774, + "created_utc_epoch_seconds" : 1707543045, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "b972dcc4-f122-4998-85d3-4eede0569f76" : { + "id" : "b972dcc4-f122-4998-85d3-4eede0569f76", + "file_system_key" : "(dev=fd01,ino=13806004)", + "path" : "file:///tmp/U/barj-test", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595650, + "last_accessed_utc_epoch_seconds" : 1708783722, + "created_utc_epoch_seconds" : 1707595650, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "101fe098-dfe0-49ee-9da1-5d1f190fb54e" : { + "id" : "101fe098-dfe0-49ee-9da1-5d1f190fb54e", + "file_system_key" : "(dev=fd01,ino=13762900)", + "path" : "file:///tmp/U/barj-test/A/1.txt", + "original_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "original_size" : 8, + "last_modified_utc_epoch_seconds" : 1708783781, + "last_accessed_utc_epoch_seconds" : 1708783850, + 
"created_utc_epoch_seconds" : 1708783781, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NO_CHANGE", + "archive_metadata_id" : "d29eda3b-3cdc-43c0-8300-05ca5cf29a7a" + }, + "c5ce673e-6da2-428e-8551-10fea6cf160b" : { + "id" : "c5ce673e-6da2-428e-8551-10fea6cf160b", + "file_system_key" : "(dev=fd01,ino=13806029)", + "path" : "file:///tmp/U/barj-test/B/2.txt", + "original_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "original_size" : 8, + "last_modified_utc_epoch_seconds" : 1707542996, + "last_accessed_utc_epoch_seconds" : 1708783850, + "created_utc_epoch_seconds" : 1707542996, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NO_CHANGE", + "archive_metadata_id" : "0bfe1db9-ef29-4b59-b7b4-e01f2d695bab" + }, + "097aa5d9-d6fc-4f7b-a5a2-c6a33f2183b3" : { + "id" : "097aa5d9-d6fc-4f7b-a5a2-c6a33f2183b3", + "file_system_key" : "(dev=fd01,ino=13806005)", + "path" : "file:///tmp/R/barj-test", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595671, + "last_accessed_utc_epoch_seconds" : 1708783771, + "created_utc_epoch_seconds" : 1707595671, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "163c5f0f-9dce-4253-b77c-9c2ba8cbd02a" : { + "id" : "163c5f0f-9dce-4253-b77c-9c2ba8cbd02a", + "file_system_key" : "(dev=fd01,ino=13763348)", + "path" : "file:///tmp/U/barj-test/A/2.txt", + "original_hash" : "f4dd4112a2540430e5a8a7158bcf120f8f2489bffa4d8f6aefa8dcd584047011", + "original_size" : 5, + "last_modified_utc_epoch_seconds" : 1708783893, + "last_accessed_utc_epoch_seconds" : 1708783893, + "created_utc_epoch_seconds" : 1708783893, + "permissions" : "rw-rw-r--", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NEW", + "archive_metadata_id" : "b68dd145-f79e-41d4-846a-9d55bc11b0da" + }, + "bbf1cc86-0f8c-4a8c-8242-776256b8c856" : { + "id" : "bbf1cc86-0f8c-4a8c-8242-776256b8c856", + "file_system_key" : "(dev=fd01,ino=13806025)", + "path" : "file:///tmp/U/barj-test/A", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1708783904, + "last_accessed_utc_epoch_seconds" : 1708783920, + "created_utc_epoch_seconds" : 1708783904, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "METADATA_CHANGED" + }, + "77fbaa89-10d1-4ccc-99fa-2dc5d90fb123" : { + "id" : "77fbaa89-10d1-4ccc-99fa-2dc5d90fb123", + "file_system_key" : "(dev=fd01,ino=13806020)", + "path" : "file:///tmp/R", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595671, + "last_accessed_utc_epoch_seconds" : 1708783768, + "created_utc_epoch_seconds" : 1707595671, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "af922b0c-057c-4f3f-9d87-d63028dc1308" : { + "id" : "af922b0c-057c-4f3f-9d87-d63028dc1308", + "file_system_key" : "(dev=fd01,ino=13763393)", + "path" : "file:///tmp/U/barj-test/A/3.txt", + "original_hash" : "00952f78a92441fbf212a3f3095c816c861bfa185dc6c387f12e1e1b7def37ae", + "original_size" : 10, + "last_modified_utc_epoch_seconds" : 1708783904, + "last_accessed_utc_epoch_seconds" : 1708783904, + "created_utc_epoch_seconds" : 1708783904, + "permissions" : 
"rw-rw-r--", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NEW", + "archive_metadata_id" : "d47227c4-a0c3-4136-975b-8d06e1908b86" + } + }, + "archive_entries" : { + "22df803b-dbbd-44d6-bd84-965153c10b5a" : { + "id" : "22df803b-dbbd-44d6-bd84-965153c10b5a", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "b3f8cff6-a26d-45ee-8be4-ef97c8b43810" + }, + "archived_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "original_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "files" : [ "89eaecde-c090-48a8-a5a1-10003f627d68" ] + }, + "b68dd145-f79e-41d4-846a-9d55bc11b0da" : { + "id" : "b68dd145-f79e-41d4-846a-9d55bc11b0da", + "archive_location" : { + "backup_increment" : 2, + "entry_name" : "b68dd145-f79e-41d4-846a-9d55bc11b0da" + }, + "archived_hash" : "f4dd4112a2540430e5a8a7158bcf120f8f2489bffa4d8f6aefa8dcd584047011", + "original_hash" : "f4dd4112a2540430e5a8a7158bcf120f8f2489bffa4d8f6aefa8dcd584047011", + "files" : [ "163c5f0f-9dce-4253-b77c-9c2ba8cbd02a" ] + }, + "d47227c4-a0c3-4136-975b-8d06e1908b86" : { + "id" : "d47227c4-a0c3-4136-975b-8d06e1908b86", + "archive_location" : { + "backup_increment" : 2, + "entry_name" : "d47227c4-a0c3-4136-975b-8d06e1908b86" + }, + "archived_hash" : "00952f78a92441fbf212a3f3095c816c861bfa185dc6c387f12e1e1b7def37ae", + "original_hash" : "00952f78a92441fbf212a3f3095c816c861bfa185dc6c387f12e1e1b7def37ae", + "files" : [ "af922b0c-057c-4f3f-9d87-d63028dc1308" ] + }, + "b2dc184f-73db-4783-8c2a-d63f037429b3" : { + "id" : "b2dc184f-73db-4783-8c2a-d63f037429b3", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "c29205b4-d30a-4bb7-a1fe-7a8a358344e7" + }, + "archived_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "original_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "files" : [ "b6dd4eb5-3496-4a98-b386-b9b5817af9a3" ] + }, + "d29eda3b-3cdc-43c0-8300-05ca5cf29a7a" : { + "id" : "d29eda3b-3cdc-43c0-8300-05ca5cf29a7a", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "c29205b4-d30a-4bb7-a1fe-7a8a358344e7" + }, + "archived_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "original_hash" : "ee79976c9380d5e337fc1c095ece8c8f22f91f306ceeb161fa51fecede2c4ba1", + "files" : [ "101fe098-dfe0-49ee-9da1-5d1f190fb54e" ] + }, + "0bfe1db9-ef29-4b59-b7b4-e01f2d695bab" : { + "id" : "0bfe1db9-ef29-4b59-b7b4-e01f2d695bab", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "874064d4-7bd0-432e-ac4c-da7ef50050c2" + }, + "archived_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "original_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "files" : [ "c5ce673e-6da2-428e-8551-10fea6cf160b" ] + } + }, + "index_file_name" : "ubuntu-backup-1708783920.index.cargo", + "data_file_names" : [ "ubuntu-backup-1708783920.00001.cargo" ] +} \ No newline at end of file diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.00001.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.00001.cargo new file mode 100644 index 0000000..e69de29 diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.index.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.index.cargo new file mode 100644 index 0000000..38ea84a --- /dev/null +++ 
b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.index.cargo @@ -0,0 +1,19 @@ +# File BaRJ Cargo Archive Index +00000001.path:/3 +00000001.type:DIRECTORY +00000001.encrypt:false +00000001.metadata.rel.start.idx:0 +00000001.metadata.rel.start.file:ubuntu-backup-1708783987.00001.cargo +00000001.metadata.rel.end.idx:0 +00000001.metadata.rel.end.file:ubuntu-backup-1708783987.00001.cargo +00000001.metadata.abs.start.idx:0 +00000001.metadata.abs.end.idx:0 +00000001.metadata.orig.size:0 +00000001.metadata.orig.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000001.metadata.arch.size:0 +00000001.metadata.arch.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +last.cnunk.index:1 +last.cnunk.size:0 +max.cnunk.size:536870912 +last.entity.index:1 +total.size:0 diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.manifest.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.manifest.cargo new file mode 100644 index 0000000..09fcaed Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.manifest.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.manifest.cargo.json b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.manifest.cargo.json new file mode 100644 index 0000000..efbdbb1 --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708783987.manifest.cargo.json @@ -0,0 +1,208 @@ +{ + "backup_versions" : [ 3 ], + "encryption_keys" : null, + "app_version" : "0.12.1", + "start_time_utc_epoch_seconds" : 1708783987, + "file_name_prefix" : "ubuntu-backup-1708783987", + "backup_type" : "INCREMENTAL", + "operating_system" : "Linux", + "job_configuration" : { + "backup_type" : "INCREMENTAL", + "hash_algorithm" : "SHA256", + "compression_algorithm" : "NONE", + "encryption_key" : null, + "duplicate_strategy" : "KEEP_EACH", + "chunk_size_mebibyte" : 512, + "file_name_prefix" : "ubuntu-backup", + "destination_directory" : "file:///home/esta/Backup/", + "sources" : [ { + "path" : "file:///tmp/R", + "include_patterns" : [ "**" ], + "exclude_patterns" : [ ] + }, { + "path" : "file:///tmp/U", + "include_patterns" : [ "**" ], + "exclude_patterns" : [ ] + } ] + }, + "files" : { + "c0983998-8232-46b1-b45f-621495b97011" : { + "id" : "c0983998-8232-46b1-b45f-621495b97011", + "file_system_key" : "(dev=fd01,ino=13806022)", + "path" : "file:///tmp/R/barj-test/A", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1708783966, + "last_accessed_utc_epoch_seconds" : 1708783987, + "created_utc_epoch_seconds" : 1708783966, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "METADATA_CHANGED" + }, + "8d4a04b4-f262-427e-88cc-a9f3a1b1826d" : { + "id" : "8d4a04b4-f262-427e-88cc-a9f3a1b1826d", + "file_system_key" : "(dev=fd01,ino=13806004)", + "path" : "file:///tmp/U/barj-test", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595650, + "last_accessed_utc_epoch_seconds" : 1708783722, + "created_utc_epoch_seconds" : 1707595650, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "af9738a4-628c-4795-b156-f0c1a82d9d41" : { + "id" : "af9738a4-628c-4795-b156-f0c1a82d9d41", + "file_system_key" : "(dev=fd01,ino=13806029)", + "path" : "file:///tmp/U/barj-test/B/2.txt", + "original_hash" : 
"33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "original_size" : 8, + "last_modified_utc_epoch_seconds" : 1707542996, + "last_accessed_utc_epoch_seconds" : 1708783850, + "created_utc_epoch_seconds" : 1707542996, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NO_CHANGE", + "archive_metadata_id" : "cf989724-425f-4dc3-9997-07edb76e4b2d" + }, + "48cc3915-dedb-4093-ba71-5acc7610d5bd" : { + "id" : "48cc3915-dedb-4093-ba71-5acc7610d5bd", + "file_system_key" : "(dev=fd01,ino=13806005)", + "path" : "file:///tmp/R/barj-test", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595671, + "last_accessed_utc_epoch_seconds" : 1708783771, + "created_utc_epoch_seconds" : 1707595671, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "9c9c8baf-b652-4d9a-9467-9fa389acb580" : { + "id" : "9c9c8baf-b652-4d9a-9467-9fa389acb580", + "file_system_key" : "(dev=fd01,ino=13806027)", + "path" : "file:///tmp/R/barj-test/B/2.txt", + "original_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "original_size" : 17, + "last_modified_utc_epoch_seconds" : 1707543066, + "last_accessed_utc_epoch_seconds" : 1708783850, + "created_utc_epoch_seconds" : 1707543066, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NO_CHANGE", + "archive_metadata_id" : "1388e5b3-e842-44b6-9cc9-e2fa383883bf" + }, + "0e5093c5-cd72-49fb-a4ce-a395a57fd09c" : { + "id" : "0e5093c5-cd72-49fb-a4ce-a395a57fd09c", + "file_system_key" : "(dev=fd01,ino=13806021)", + "path" : "file:///tmp/R/barj-test/B", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707543034, + "last_accessed_utc_epoch_seconds" : 1708783849, + "created_utc_epoch_seconds" : 1707543034, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "7ac5d772-3877-43f5-b9f8-62c48a180417" : { + "id" : "7ac5d772-3877-43f5-b9f8-62c48a180417", + "file_system_key" : "(dev=fd01,ino=13806024)", + "path" : "file:///tmp/U/barj-test/B", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707542974, + "last_accessed_utc_epoch_seconds" : 1708783849, + "created_utc_epoch_seconds" : 1707542974, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "fbbec41e-512a-467b-8202-cf71f4f2f579" : { + "id" : "fbbec41e-512a-467b-8202-cf71f4f2f579", + "file_system_key" : "(dev=fd01,ino=13806020)", + "path" : "file:///tmp/R", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595671, + "last_accessed_utc_epoch_seconds" : 1708783768, + "created_utc_epoch_seconds" : 1707595671, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + }, + "c63e84b4-2900-4372-a368-6ef659de6b96" : { + "id" : "c63e84b4-2900-4372-a368-6ef659de6b96", + "file_system_key" : "(dev=fd01,ino=13806025)", + "path" : "file:///tmp/U/barj-test/A", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1708783978, + "last_accessed_utc_epoch_seconds" : 1708783987, + "created_utc_epoch_seconds" : 1708783978, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" 
: "DIRECTORY", + "hidden" : false, + "status" : "METADATA_CHANGED" + }, + "51131a2a-17be-4e8a-b673-09f4a75c3a69" : { + "id" : "51131a2a-17be-4e8a-b673-09f4a75c3a69", + "file_system_key" : "(dev=fd01,ino=13806023)", + "path" : "file:///tmp/U", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595650, + "last_accessed_utc_epoch_seconds" : 1708783722, + "created_utc_epoch_seconds" : 1707595650, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NO_CHANGE" + } + }, + "archive_entries" : { + "cf989724-425f-4dc3-9997-07edb76e4b2d" : { + "id" : "cf989724-425f-4dc3-9997-07edb76e4b2d", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "874064d4-7bd0-432e-ac4c-da7ef50050c2" + }, + "archived_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "original_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "files" : [ "af9738a4-628c-4795-b156-f0c1a82d9d41" ] + }, + "1388e5b3-e842-44b6-9cc9-e2fa383883bf" : { + "id" : "1388e5b3-e842-44b6-9cc9-e2fa383883bf", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "b3f8cff6-a26d-45ee-8be4-ef97c8b43810" + }, + "archived_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "original_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "files" : [ "9c9c8baf-b652-4d9a-9467-9fa389acb580" ] + } + }, + "index_file_name" : "ubuntu-backup-1708783987.index.cargo", + "data_file_names" : [ "ubuntu-backup-1708783987.00001.cargo" ] +} \ No newline at end of file diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.00001.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.00001.cargo new file mode 100644 index 0000000..ad1ecb0 --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.00001.cargo @@ -0,0 +1 @@ +2222222222222222-22222222 \ No newline at end of file diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.index.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.index.cargo new file mode 100644 index 0000000..16c0fc4 --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.index.cargo @@ -0,0 +1,65 @@ +# File BaRJ Cargo Archive Index +00000001.path:/0 +00000001.type:DIRECTORY +00000001.encrypt:false +00000001.metadata.rel.start.idx:0 +00000001.metadata.rel.start.file:ubuntu-backup-1708856935.00001.cargo +00000001.metadata.rel.end.idx:0 +00000001.metadata.rel.end.file:ubuntu-backup-1708856935.00001.cargo +00000001.metadata.abs.start.idx:0 +00000001.metadata.abs.end.idx:0 +00000001.metadata.orig.size:0 +00000001.metadata.orig.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000001.metadata.arch.size:0 +00000001.metadata.arch.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000002.path:/0/e6b6089a-4863-439d-b4f3-555281a386a6 +00000002.type:REGULAR_FILE +00000002.encrypt:false +00000002.content.rel.start.idx:0 +00000002.content.rel.start.file:ubuntu-backup-1708856935.00001.cargo +00000002.content.rel.end.idx:8 +00000002.content.rel.end.file:ubuntu-backup-1708856935.00001.cargo +00000002.content.abs.start.idx:0 +00000002.content.abs.end.idx:8 +00000002.content.orig.size:8 +00000002.content.orig.hash:33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629 +00000002.content.arch.size:8 
+00000002.content.arch.hash:33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629 +00000002.metadata.rel.start.idx:8 +00000002.metadata.rel.start.file:ubuntu-backup-1708856935.00001.cargo +00000002.metadata.rel.end.idx:8 +00000002.metadata.rel.end.file:ubuntu-backup-1708856935.00001.cargo +00000002.metadata.abs.start.idx:8 +00000002.metadata.abs.end.idx:8 +00000002.metadata.orig.size:0 +00000002.metadata.orig.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000002.metadata.arch.size:0 +00000002.metadata.arch.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000003.path:/0/14f7f2d3-4b9d-4abf-9440-9a9d5e2f66af +00000003.type:REGULAR_FILE +00000003.encrypt:false +00000003.content.rel.start.idx:8 +00000003.content.rel.start.file:ubuntu-backup-1708856935.00001.cargo +00000003.content.rel.end.idx:25 +00000003.content.rel.end.file:ubuntu-backup-1708856935.00001.cargo +00000003.content.abs.start.idx:8 +00000003.content.abs.end.idx:25 +00000003.content.orig.size:17 +00000003.content.orig.hash:37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601 +00000003.content.arch.size:17 +00000003.content.arch.hash:37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601 +00000003.metadata.rel.start.idx:25 +00000003.metadata.rel.start.file:ubuntu-backup-1708856935.00001.cargo +00000003.metadata.rel.end.idx:25 +00000003.metadata.rel.end.file:ubuntu-backup-1708856935.00001.cargo +00000003.metadata.abs.start.idx:25 +00000003.metadata.abs.end.idx:25 +00000003.metadata.orig.size:0 +00000003.metadata.orig.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +00000003.metadata.arch.size:0 +00000003.metadata.arch.hash:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +last.cnunk.index:1 +last.cnunk.size:25 +max.cnunk.size:536870912 +last.entity.index:3 +total.size:25 diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.manifest.cargo b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.manifest.cargo new file mode 100644 index 0000000..920898c Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.manifest.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.manifest.cargo.json b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.manifest.cargo.json new file mode 100644 index 0000000..76d17f5 --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-backup-1708856935.manifest.cargo.json @@ -0,0 +1,208 @@ +{ + "backup_versions" : [ 0 ], + "encryption_keys" : null, + "app_version" : "0.13.0", + "start_time_utc_epoch_seconds" : 1708856935, + "file_name_prefix" : "ubuntu-backup-1708856935", + "backup_type" : "FULL", + "operating_system" : "Linux", + "job_configuration" : { + "backup_type" : "INCREMENTAL", + "hash_algorithm" : "SHA256", + "compression_algorithm" : "NONE", + "encryption_key" : null, + "duplicate_strategy" : "KEEP_EACH", + "chunk_size_mebibyte" : 512, + "file_name_prefix" : "ubuntu-backup", + "destination_directory" : "file:///home/esta/Backup/", + "sources" : [ { + "path" : "file:///tmp/R", + "include_patterns" : [ "**" ], + "exclude_patterns" : [ ] + }, { + "path" : "file:///tmp/U", + "include_patterns" : [ "**" ], + "exclude_patterns" : [ ] + } ] + }, + "files" : { + "55b54ba5-7dc6-4308-a210-664fc2621b73" : { + "id" : "55b54ba5-7dc6-4308-a210-664fc2621b73", + "file_system_key" : "(dev=fd01,ino=13806021)", + "path" : "file:///tmp/R/barj-test/B", + 
"original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707543034, + "last_accessed_utc_epoch_seconds" : 1708783849, + "created_utc_epoch_seconds" : 1707543034, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NEW" + }, + "b9bf30b4-b48a-4790-8bb6-26ed87dddf76" : { + "id" : "b9bf30b4-b48a-4790-8bb6-26ed87dddf76", + "file_system_key" : "(dev=fd01,ino=13806029)", + "path" : "file:///tmp/U/barj-test/B/2.txt", + "original_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "original_size" : 8, + "last_modified_utc_epoch_seconds" : 1707542996, + "last_accessed_utc_epoch_seconds" : 1708783850, + "created_utc_epoch_seconds" : 1707542996, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NEW", + "archive_metadata_id" : "e6b6089a-4863-439d-b4f3-555281a386a6" + }, + "2c6fa2ab-acba-4e63-969a-3736edbb198e" : { + "id" : "2c6fa2ab-acba-4e63-969a-3736edbb198e", + "file_system_key" : "(dev=fd01,ino=13806020)", + "path" : "file:///tmp/R", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595671, + "last_accessed_utc_epoch_seconds" : 1708783768, + "created_utc_epoch_seconds" : 1707595671, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NEW" + }, + "b12152fe-34ca-4106-8de9-0835486c86b7" : { + "id" : "b12152fe-34ca-4106-8de9-0835486c86b7", + "file_system_key" : "(dev=fd01,ino=13806027)", + "path" : "file:///tmp/R/barj-test/B/2.txt", + "original_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "original_size" : 17, + "last_modified_utc_epoch_seconds" : 1707543066, + "last_accessed_utc_epoch_seconds" : 1708783850, + "created_utc_epoch_seconds" : 1707543066, + "permissions" : "rwxrwxrwx", + "owner" : "esta", + "group" : "esta", + "file_type" : "REGULAR_FILE", + "hidden" : false, + "status" : "NEW", + "archive_metadata_id" : "14f7f2d3-4b9d-4abf-9440-9a9d5e2f66af" + }, + "cead6403-3247-44ee-86a9-2d0eb0118954" : { + "id" : "cead6403-3247-44ee-86a9-2d0eb0118954", + "file_system_key" : "(dev=fd01,ino=13806005)", + "path" : "file:///tmp/R/barj-test", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595671, + "last_accessed_utc_epoch_seconds" : 1708783771, + "created_utc_epoch_seconds" : 1707595671, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NEW" + }, + "2ad0a399-088a-403f-b5f3-ed2808b8d697" : { + "id" : "2ad0a399-088a-403f-b5f3-ed2808b8d697", + "file_system_key" : "(dev=fd01,ino=13806004)", + "path" : "file:///tmp/U/barj-test", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595650, + "last_accessed_utc_epoch_seconds" : 1708783722, + "created_utc_epoch_seconds" : 1707595650, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NEW" + }, + "758d55e5-0a48-4f58-92a7-ba6bf442e0dc" : { + "id" : "758d55e5-0a48-4f58-92a7-ba6bf442e0dc", + "file_system_key" : "(dev=fd01,ino=13806022)", + "path" : "file:///tmp/R/barj-test/A", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1708783966, + "last_accessed_utc_epoch_seconds" : 1708783987, + "created_utc_epoch_seconds" : 1708783966, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", 
+ "hidden" : false, + "status" : "NEW" + }, + "2fbae446-21ce-432f-a066-b54e1f9c3673" : { + "id" : "2fbae446-21ce-432f-a066-b54e1f9c3673", + "file_system_key" : "(dev=fd01,ino=13806025)", + "path" : "file:///tmp/U/barj-test/A", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1708783978, + "last_accessed_utc_epoch_seconds" : 1708783987, + "created_utc_epoch_seconds" : 1708783978, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NEW" + }, + "f23c6707-ce06-427f-8f14-449e04c4c9d7" : { + "id" : "f23c6707-ce06-427f-8f14-449e04c4c9d7", + "file_system_key" : "(dev=fd01,ino=13806023)", + "path" : "file:///tmp/U", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707595650, + "last_accessed_utc_epoch_seconds" : 1708783722, + "created_utc_epoch_seconds" : 1707595650, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NEW" + }, + "afccb254-8669-4e92-913f-53890c914309" : { + "id" : "afccb254-8669-4e92-913f-53890c914309", + "file_system_key" : "(dev=fd01,ino=13806024)", + "path" : "file:///tmp/U/barj-test/B", + "original_size" : 4096, + "last_modified_utc_epoch_seconds" : 1707542974, + "last_accessed_utc_epoch_seconds" : 1708783849, + "created_utc_epoch_seconds" : 1707542974, + "permissions" : "rwxrwxr-x", + "owner" : "esta", + "group" : "esta", + "file_type" : "DIRECTORY", + "hidden" : false, + "status" : "NEW" + } + }, + "archive_entries" : { + "e6b6089a-4863-439d-b4f3-555281a386a6" : { + "id" : "e6b6089a-4863-439d-b4f3-555281a386a6", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "e6b6089a-4863-439d-b4f3-555281a386a6" + }, + "archived_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "original_hash" : "33a7d3da476a32ac237b3f603a1be62fad00299e0d4b5a8db8d913104edec629", + "files" : [ "b9bf30b4-b48a-4790-8bb6-26ed87dddf76" ] + }, + "14f7f2d3-4b9d-4abf-9440-9a9d5e2f66af" : { + "id" : "14f7f2d3-4b9d-4abf-9440-9a9d5e2f66af", + "archive_location" : { + "backup_increment" : 0, + "entry_name" : "14f7f2d3-4b9d-4abf-9440-9a9d5e2f66af" + }, + "archived_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "original_hash" : "37d6cb2fb5a76e3bfcd3a0f4d738bd235b2b50c8c9ba09f9b9a357543243b601", + "files" : [ "b12152fe-34ca-4106-8de9-0835486c86b7" ] + } + }, + "index_file_name" : "ubuntu-backup-1708856935.index.cargo", + "data_file_names" : [ "ubuntu-backup-1708856935.00001.cargo" ] +} \ No newline at end of file diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883558.00001.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883558.00001.cargo new file mode 100644 index 0000000..ee65f00 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883558.00001.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883558.index.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883558.index.cargo new file mode 100644 index 0000000..97f4c41 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883558.index.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883558.manifest.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883558.manifest.cargo new file mode 100644 index 0000000..696d675 Binary files /dev/null and 
b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883558.manifest.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883624.00001.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883624.00001.cargo new file mode 100644 index 0000000..e69de29 diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883624.index.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883624.index.cargo new file mode 100644 index 0000000..6ff3aae Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883624.index.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883624.manifest.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883624.manifest.cargo new file mode 100644 index 0000000..251eb22 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883624.manifest.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883649.00001.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883649.00001.cargo new file mode 100644 index 0000000..4a866eb --- /dev/null +++ b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883649.00001.cargo @@ -0,0 +1 @@ +[*Hkk&[0bhcx5ϛnOP (`X g7w|8 AI߈dM%(}L0U}sy[@FWU)lY*>.z~5 \ No newline at end of file diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883649.index.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883649.index.cargo new file mode 100644 index 0000000..3b23026 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883649.index.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883649.manifest.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883649.manifest.cargo new file mode 100644 index 0000000..b962909 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883649.manifest.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883671.00001.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883671.00001.cargo new file mode 100644 index 0000000..e69de29 diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883671.index.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883671.index.cargo new file mode 100644 index 0000000..9103cb3 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883671.index.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883671.manifest.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883671.manifest.cargo new file mode 100644 index 0000000..3788c6b Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883671.manifest.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883739.00001.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883739.00001.cargo new file mode 100644 index 0000000..84a7488 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883739.00001.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883739.index.cargo 
b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883739.index.cargo new file mode 100644 index 0000000..0bc99ae Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883739.index.cargo differ diff --git a/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883739.manifest.cargo b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883739.manifest.cargo new file mode 100644 index 0000000..4c149e3 Binary files /dev/null and b/file-barj-core/src/test/resources/backups/ubuntu-encrypted-1708883739.manifest.cargo differ diff --git a/file-barj-job/README.md b/file-barj-job/README.md index 9501a26..d3c4ddb 100644 --- a/file-barj-job/README.md +++ b/file-barj-job/README.md @@ -48,6 +48,29 @@ java -jar build/libs/file-barj-job.jar \ --threads 2 ``` +### Merging backup increments + +Execute the following command (assuming that your executable is named accordingly). + +```commandline +java -jar build/libs/file-barj-job.jar \ + --merge \ + --backup-source /backup/directory/path \ + --prefix backup-job-file-prefix \ + --delete-obsolete false \ + --key-store keys.p12 \ + --key-alias alias \ + --from-epoch-seconds 123456 \ + --to-epoch-seconds 234567 +``` + +The above command merges all backup increments created between 123456 and 234567 (epoch seconds), +including the increments created at the two boundary timestamps as well as every increment +between them. + +> [!WARNING] +> If there is a full backup inside the range, every incremental backup of the range that was created before the last full backup of the range is ignored during the merge, and deleted if the configuration allows the deletion of obsolete files. For example, if the last full backup of the range was created at 200000 (an illustrative value), the merged increment starts from that full backup and the earlier increments of the range are skipped. + ### Restoring a backup to a directory Execute the following command (assuming that your executable is named accordingly). diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/Controller.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/Controller.java index c6eb0de..661f223 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/Controller.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/Controller.java @@ -7,6 +7,7 @@ import com.github.nagyesta.filebarj.core.config.RestoreTargets; import com.github.nagyesta.filebarj.core.config.RestoreTask; import com.github.nagyesta.filebarj.core.inspect.pipeline.IncrementInspectionController; +import com.github.nagyesta.filebarj.core.merge.MergeController; import com.github.nagyesta.filebarj.core.restore.pipeline.RestoreController; import com.github.nagyesta.filebarj.io.stream.crypto.EncryptionUtil; import com.github.nagyesta.filebarj.job.cli.*; @@ -66,6 +67,12 @@ public void run() throws Exception { .toArray(String[]::new), console).getResult(); doRestore(restoreProperties); break; + case MERGE: + final var mergeProperties = new CliMergeParser(Arrays.stream(args) + .skip(1) + .toArray(String[]::new), console).getResult(); + doMerge(mergeProperties); + break; case GEN_KEYS: final var keyStoreProperties = new CliKeyGenParser(Arrays.stream(args) .skip(1) @@ -146,6 +153,18 @@ protected void doRestore(final RestoreProperties properties) { log.info("Restore operation completed.
Total time: {}", toProcessSummary(durationMillis)); } + protected void doMerge(final MergeProperties properties) { + final var kek = getPrivateKey(properties.getKeyProperties()); + final var startTimeMillis = System.currentTimeMillis(); + log.info("Bootstrapping merge operation..."); + new MergeController(properties.getBackupSource(), properties.getPrefix(), kek, + properties.getFromTimeEpochSeconds(), properties.getToTimeEpochSeconds()) + .execute(properties.isDeleteObsoleteFiles()); + final var endTimeMillis = System.currentTimeMillis(); + final var durationMillis = (endTimeMillis - startTimeMillis); + log.info("Merge operation completed. Total time: {}", toProcessSummary(durationMillis)); + } + protected void doBackup(final BackupProperties properties) throws IOException { final var config = new ObjectMapper().reader().readValue(properties.getConfig().toFile(), BackupJobConfiguration.class); final var startTimeMillis = System.currentTimeMillis(); diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliMergeParser.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliMergeParser.java new file mode 100644 index 0000000..3a911bc --- /dev/null +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliMergeParser.java @@ -0,0 +1,73 @@ +package com.github.nagyesta.filebarj.job.cli; + +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; + +import java.io.Console; +import java.nio.file.Path; + +/** + * Parser class for the command line arguments of the merge task. + */ +@Slf4j +public class CliMergeParser extends CliICommonBackupFileParser { + + private static final String DELETE_OBSOLETE = "delete-obsolete"; + private static final String FROM_EPOCH_SECONDS = "from-epoch-seconds"; + private static final String TO_EPOCH_SECONDS = "to-epoch-seconds"; + + /** + * Creates a new {@link CliMergeParser} instance and sets the input arguments. 
+ * + * @param args the command line arguments + * @param console the console we should use for password input + */ + public CliMergeParser(final String[] args, final Console console) { + super(Task.MERGE, args, commandLine -> { + final var deleteObsolete = Boolean.parseBoolean(commandLine.getOptionValue(DELETE_OBSOLETE, "false")); + final var backupSource = Path.of(commandLine.getOptionValue(BACKUP_SOURCE)).toAbsolutePath(); + final var prefix = commandLine.getOptionValue(PREFIX); + final var fromTime = Long.parseLong(commandLine.getOptionValue(FROM_EPOCH_SECONDS)); + final var toTime = Long.parseLong(commandLine.getOptionValue(TO_EPOCH_SECONDS)); + final var keyProperties = parseKeyProperties(console, commandLine); + return MergeProperties.builder() + .deleteObsoleteFiles(deleteObsolete) + .backupSource(backupSource) + .keyProperties(keyProperties) + .prefix(prefix) + .fromTimeEpochSeconds(fromTime) + .toTimeEpochSeconds(toTime) + .build(); + }); + } + + @Override + protected Options createOptions() { + return super.createOptions() + .addOption(Option.builder() + .longOpt(DELETE_OBSOLETE) + .numberOfArgs(1) + .argName("boolean") + .type(Boolean.class) + .required(false) + .desc("Allow deleting the backup files that are no longer needed because their contents were merged.") + .build()) + .addOption(Option.builder() + .longOpt(FROM_EPOCH_SECONDS) + .numberOfArgs(1) + .argName("epoch_seconds") + .required(true) + .type(Long.class) + .desc("The date and time in UTC epoch seconds identifying the first increment we want to merge.") + .build()) + .addOption(Option.builder() + .longOpt(TO_EPOCH_SECONDS) + .numberOfArgs(1) + .argName("epoch_seconds") + .required(true) + .type(Long.class) + .desc("The date and time in UTC epoch seconds identifying the last increment we want to merge.") + .build()); + } +} diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliTaskParser.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliTaskParser.java index 1b53bd8..816c0d9 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliTaskParser.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/CliTaskParser.java @@ -33,6 +33,9 @@ protected Options createOptions() { .addOption(Option.builder() .longOpt(Task.RESTORE.getCommand()) .desc("Restore a backup.").build()) + .addOption(Option.builder() + .longOpt(Task.MERGE.getCommand()) + .desc("Merge increments of a backup.").build()) .addOption(Option.builder() .longOpt(Task.GEN_KEYS.getCommand()) .desc("Generates a key pair for the encryption.").build()) diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/MergeProperties.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/MergeProperties.java new file mode 100644 index 0000000..0ccfb5a --- /dev/null +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/MergeProperties.java @@ -0,0 +1,17 @@ +package com.github.nagyesta.filebarj.job.cli; + +import lombok.Data; +import lombok.EqualsAndHashCode; +import lombok.experimental.SuperBuilder; + +/** + * The parsed command line arguments of a merge task.
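+ * Besides the common backup file properties (backup source, prefix, optional key store settings), it captures the inclusive from/to range boundaries in UTC epoch seconds and whether obsolete backup files may be deleted after the merge.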
+ */ +@Data +@SuperBuilder +@EqualsAndHashCode(callSuper = true) +public class MergeProperties extends BackupFileProperties { + private final boolean deleteObsoleteFiles; + private final long fromTimeEpochSeconds; + private final long toTimeEpochSeconds; +} diff --git a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/Task.java b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/Task.java index e169baa..03493a6 100644 --- a/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/Task.java +++ b/file-barj-job/src/main/java/com/github/nagyesta/filebarj/job/cli/Task.java @@ -16,6 +16,10 @@ public enum Task { * Restoring the contents of a backup. */ RESTORE("restore"), + /** + * Merging the contents of some backup increments. + */ + MERGE("merge"), /** * Generating a key pair for the encryption. */ diff --git a/file-barj-job/src/test/java/com/github/nagyesta/filebarj/job/ControllerIntegrationTest.java b/file-barj-job/src/test/java/com/github/nagyesta/filebarj/job/ControllerIntegrationTest.java index 52dbf16..61c7a9b 100644 --- a/file-barj-job/src/test/java/com/github/nagyesta/filebarj/job/ControllerIntegrationTest.java +++ b/file-barj-job/src/test/java/com/github/nagyesta/filebarj/job/ControllerIntegrationTest.java @@ -26,6 +26,7 @@ class ControllerIntegrationTest extends TempFileAwareTest { private static final long A_SECOND = 1000; + @SuppressWarnings("checkstyle:MethodLength") @Test void testEndToEndFlowShouldWorkWhenCalledWithValidParameters() throws Exception { //given @@ -69,7 +70,11 @@ void testEndToEndFlowShouldWorkWhenCalledWithValidParameters() throws Exception }; //when backup is executed + var start = Instant.now().getEpochSecond(); new Controller(backupArgs, console).run(); + if (!Files.exists(backupDirectory.resolve(prefix + "-" + start + ".manifest.cargo"))) { + start++; + } final var atEpochSeconds = Instant.now().getEpochSecond(); Thread.sleep(A_SECOND); @@ -77,7 +82,11 @@ void testEndToEndFlowShouldWorkWhenCalledWithValidParameters() throws Exception Files.writeString(txt, modifiedTxtContent); //when another backup increment is executed + var end = Instant.now().getEpochSecond(); new Controller(backupArgs, console).run(); + if (!Files.exists(backupDirectory.resolve(prefix + "-" + end + ".manifest.cargo"))) { + end++; + } //given we prepare for restore final var restoreDirectory = testDataRoot.resolve("restore"); @@ -150,5 +159,34 @@ void testEndToEndFlowShouldWorkWhenCalledWithValidParameters() throws Exception .endsWith(FilenameUtils.separatorsToUnix(originalDirectory.toAbsolutePath().toString()))); Assertions.assertTrue(actualContent.get(2) .endsWith(FilenameUtils.separatorsToUnix(txt.toAbsolutePath().toString()))); + + //given we merge the versions + Assertions.assertTrue(Files.exists(backupDirectory.resolve(prefix + "-" + start + ".manifest.cargo")), + "Full backup manifest should exist"); + Assertions.assertTrue(Files.exists(backupDirectory.resolve(prefix + "-" + end + ".manifest.cargo")), + "Incremental backup manifest should exist"); + Assertions.assertFalse(Files.exists(backupDirectory.resolve(prefix + "-" + start + "-" + end + ".manifest.cargo")), + "Merged backup manifest should not exist"); + final var mergeArgs = new String[]{ + "--merge", + "--backup-source", backupDirectory.toString(), + "--prefix", prefix, + "--key-store", keyStore.toString(), + "--key-alias", alias, + "--delete-obsolete", "true", + "--from-epoch-seconds", start + "", + "--to-epoch-seconds", end + "" + }; + + //when the merge is executed + new
Controller(mergeArgs, console).run(); + + //then the merged manifest exists + Assertions.assertFalse(Files.exists(backupDirectory.resolve(prefix + "-" + start + ".manifest.cargo")), + "Full backup manifest should not exist"); + Assertions.assertFalse(Files.exists(backupDirectory.resolve(prefix + "-" + end + ".manifest.cargo")), + "Incremental backup manifest should not exist"); + Assertions.assertTrue(Files.exists(backupDirectory.resolve(prefix + "-" + start + "-" + end + ".manifest.cargo")), + "Merged backup manifest should exist"); } } diff --git a/file-barj-job/src/test/java/com/github/nagyesta/filebarj/job/cli/CliMergeParserTest.java b/file-barj-job/src/test/java/com/github/nagyesta/filebarj/job/cli/CliMergeParserTest.java new file mode 100644 index 0000000..dfdb2d3 --- /dev/null +++ b/file-barj-job/src/test/java/com/github/nagyesta/filebarj/job/cli/CliMergeParserTest.java @@ -0,0 +1,176 @@ +package com.github.nagyesta.filebarj.job.cli; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.io.Console; +import java.nio.file.Path; + +import static org.mockito.Mockito.*; + +class CliMergeParserTest { + + @Test + void testConstructorShouldThrowExceptionWhenNoArgsArePassed() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, () -> new CliMergeParser(new String[0], mock(Console.class))); + + //then + exception + } + + @Test + void testConstructorShouldThrowExceptionWhenUnknownOptionIsPassed() { + //given + + //when + Assertions.assertThrows(IllegalArgumentException.class, () -> new CliMergeParser(new String[]{"--unknown"}, mock(Console.class))); + + //then + exception + } + + @Test + void testConstructorShouldThrowExceptionWhenRequiredParameterIsMissing() { + //given + final var args = new String[]{"--delete-obsolete", "true"}; + + //when + Assertions.assertThrows(IllegalArgumentException.class, () -> new CliMergeParser(args, mock(Console.class))); + + //then + exception + } + + @Test + void testConstructorShouldNotTouchTheConsoleWhenKeyStoreParametersAreNotPassed() { + //given + final var prefix = "prefix"; + final var backup = Path.of("backup-dir"); + final var delete = false; + final var from = 123L; + final var to = 234L; + final var args = new String[]{ + "--delete-obsolete", String.valueOf(delete), + "--backup-source", backup.toString(), + "--prefix", prefix, + "--from-epoch-seconds", String.valueOf(from), + "--to-epoch-seconds", String.valueOf(to) + }; + final var console = mock(Console.class); + + //when + new CliMergeParser(args, console); + + //then + verifyNoInteractions(console); + } + + @Test + void testConstructorShouldCaptureAndSetPropertiesWhenKeyStoreParametersAreNotPassed() { + //given + final var prefix = "prefix"; + final var backup = Path.of("backup-dir"); + final var delete = true; + final var from = 123L; + final var to = 234L; + final var args = new String[]{ + "--delete-obsolete", String.valueOf(delete), + "--backup-source", backup.toString(), + "--prefix", prefix, + "--from-epoch-seconds", String.valueOf(from), + "--to-epoch-seconds", String.valueOf(to) + }; + final var console = mock(Console.class); + + //when + final var underTest = new CliMergeParser(args, console); + final var actual = underTest.getResult(); + + //then + Assertions.assertEquals(delete, actual.isDeleteObsoleteFiles()); + Assertions.assertEquals(backup.toAbsolutePath(), actual.getBackupSource()); + Assertions.assertEquals(prefix, actual.getPrefix()); + Assertions.assertEquals(from, actual.getFromTimeEpochSeconds()); + 
Assertions.assertEquals(to, actual.getToTimeEpochSeconds()); + Assertions.assertNull(actual.getKeyProperties()); + } + + @Test + void testConstructorShouldCaptureAndSetPropertiesWhenAllKeyStoreParametersArePassed() { + //given + final var prefix = "prefix"; + final var backup = Path.of("backup-dir"); + final var store = Path.of("key-store.p12"); + final var alias = "alias"; + final var password = new char[]{'a', 'b', 'c'}; + final var delete = true; + final var from = 123L; + final var to = 234L; + final var args = new String[]{ + "--delete-obsolete", String.valueOf(delete), + "--backup-source", backup.toString(), + "--prefix", prefix, + "--from-epoch-seconds", String.valueOf(from), + "--to-epoch-seconds", String.valueOf(to), + "--key-store", store.toString(), + "--key-alias", alias + }; + final var console = mock(Console.class); + when(console.readPassword(anyString())).thenReturn(password); + + //when + final var underTest = new CliMergeParser(args, console); + final var actual = underTest.getResult(); + + //then + Assertions.assertEquals(delete, actual.isDeleteObsoleteFiles()); + Assertions.assertEquals(backup.toAbsolutePath(), actual.getBackupSource()); + Assertions.assertEquals(prefix, actual.getPrefix()); + Assertions.assertEquals(from, actual.getFromTimeEpochSeconds()); + Assertions.assertEquals(to, actual.getToTimeEpochSeconds()); + Assertions.assertNotNull(actual.getKeyProperties()); + Assertions.assertEquals(store.toAbsolutePath(), actual.getKeyProperties().getKeyStore()); + Assertions.assertEquals(alias, actual.getKeyProperties().getAlias()); + Assertions.assertArrayEquals(password, actual.getKeyProperties().getPassword()); + } + + @Test + void testConstructorShouldCaptureAndSetPropertiesWhenRequiredKeyStoreParametersArePassed() { + //given + final var prefix = "prefix"; + final var backup = Path.of("backup-dir"); + final var store = Path.of("key-store.p12"); + final var alias = "default"; + final var password = new char[]{'a', 'b', 'c'}; + final var delete = true; + final var from = 123L; + final var to = 234L; + final var args = new String[]{ + "--delete-obsolete", String.valueOf(delete), + "--backup-source", backup.toString(), + "--prefix", prefix, + "--from-epoch-seconds", String.valueOf(from), + "--to-epoch-seconds", String.valueOf(to), + "--key-store", store.toString() + }; + final var console = mock(Console.class); + when(console.readPassword(anyString())).thenReturn(password); + + //when + final var underTest = new CliMergeParser(args, console); + final var actual = underTest.getResult(); + + //then + Assertions.assertEquals(delete, actual.isDeleteObsoleteFiles()); + Assertions.assertEquals(backup.toAbsolutePath(), actual.getBackupSource()); + Assertions.assertEquals(prefix, actual.getPrefix()); + Assertions.assertEquals(from, actual.getFromTimeEpochSeconds()); + Assertions.assertEquals(to, actual.getToTimeEpochSeconds()); + Assertions.assertNotNull(actual.getKeyProperties()); + Assertions.assertEquals(store.toAbsolutePath(), actual.getKeyProperties().getKeyStore()); + Assertions.assertEquals(alias, actual.getKeyProperties().getAlias()); + Assertions.assertArrayEquals(password, actual.getKeyProperties().getPassword()); + } + +} diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java index eb70812..0aca8fe 100644 --- 
a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/BarjCargoArchiverFileOutputStream.java @@ -2,7 +2,6 @@ import com.github.nagyesta.filebarj.io.stream.internal.BaseBarjCargoArchiverFileOutputStream; import com.github.nagyesta.filebarj.io.stream.internal.model.BarjCargoEntityIndex; -import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; import org.jetbrains.annotations.NotNull; @@ -21,7 +20,6 @@ @Slf4j public class BarjCargoArchiverFileOutputStream extends BaseBarjCargoArchiverFileOutputStream { - @Getter private final Path indexFile; private final FileOutputStream indexStream; private final BufferedOutputStream indexBufferedStream; diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ParallelBarjCargoArchiverFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ParallelBarjCargoArchiverFileOutputStream.java index 39a1764..8afad8c 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ParallelBarjCargoArchiverFileOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/ParallelBarjCargoArchiverFileOutputStream.java @@ -6,6 +6,7 @@ import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.FilenameUtils; +import org.apache.commons.io.IOUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -178,18 +179,17 @@ public CompletableFuture addDirectoryEntityAsync( /** * Merges the content and metadata streams of the given entity into this stream. * - * @param boundaryMetadata The metadata of the entity - * @param contentStream The content stream (can be null in case of directories) - * @param metadataStream The metadata stream + * @param boundaryMetadata The metadata of the entity + * @param contentAndMetadataStream The stream containing the content stream followed by + * the metadata stream * @return The boundary of the entity */ public CompletableFuture mergeEntityAsync( @NonNull final BarjCargoBoundarySource boundaryMetadata, - @Nullable final InputStream contentStream, - @NonNull final InputStream metadataStream) { + @NonNull final InputStream contentAndMetadataStream) { return CompletableFuture.supplyAsync(() -> { try { - return super.mergeEntity(boundaryMetadata, contentStream, metadataStream); + return super.mergeEntity(boundaryMetadata, contentAndMetadataStream); } catch (final IOException e) { throw new CompletionException(e); } @@ -276,10 +276,9 @@ public BarjCargoBoundarySource addDirectoryEntity( @Override public BarjCargoBoundarySource mergeEntity( @NotNull final BarjCargoBoundarySource boundaryMetadata, - @Nullable final InputStream contentStream, - @NotNull final InputStream metadataStream) throws IOException { + @NotNull final InputStream contentAndMetadataStream) throws IOException { try { - return this.mergeEntityAsync(boundaryMetadata, contentStream, metadataStream).join(); + return this.mergeEntityAsync(boundaryMetadata, contentAndMetadataStream).join(); } catch (final CompletionException ex) { unwrapIoException(ex); return null; @@ -320,15 +319,14 @@ private Function mergeEntityFr throw new IllegalStateException("Temporary stream is null for " + entity.getPath()); } log.debug("Merging temp file {} into {}", stream.getCurrentFilePath(), entity.getPath()); - super.mergeEntity(entity, 
- stream.getStream(entity.getContentBoundary()), - stream.getStream(entity.getMetadataBoundary())); + super.mergeEntity(entity, stream.getStream(entity.getContentBoundary(), entity.getMetadataBoundary())); log.debug("Merged temp file {} into {}", stream.getCurrentFilePath(), entity.getPath()); return entity; } catch (final Exception e) { throw new CompletionException(e); } finally { Optional.ofNullable(tempStream.get()).ifPresent(stream -> { + IOUtils.closeQuietly(stream); try { stream.delete(); } catch (final IOException ignore) { diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/BaseBarjCargoArchiverFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/BaseBarjCargoArchiverFileOutputStream.java index 5032f9f..6dd9672 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/BaseBarjCargoArchiverFileOutputStream.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/BaseBarjCargoArchiverFileOutputStream.java @@ -11,6 +11,7 @@ import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; +import org.apache.commons.io.input.CloseShieldInputStream; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -20,10 +21,12 @@ import java.io.InputStream; import java.io.OutputStream; import java.nio.charset.StandardCharsets; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Consumer; import static org.apache.commons.io.FilenameUtils.normalizeNoEndSeparator; import static org.apache.commons.io.FilenameUtils.separatorsToUnix; @@ -180,16 +183,15 @@ public BarjCargoBoundarySource addDirectoryEntity( /** * Merges an entity from another stream into this stream. 
* - * @param boundaryMetadata The metadata of the entity - * @param contentStream The content stream (null when the entity is a directory) - * @param metadataStream The metadata stream + * @param boundaryMetadata The metadata of the entity + * @param contentAndMetadataStream The stream containing the content part (absent when the + * entity is a directory) followed by the metadata part * @return An object with the entity boundaries * @throws IOException When an IO exception occurs during the write operation */ public BarjCargoBoundarySource mergeEntity( @NonNull final BarjCargoBoundarySource boundaryMetadata, - @Nullable final InputStream contentStream, - @NonNull final InputStream metadataStream) throws IOException { + @NotNull final InputStream contentAndMetadataStream) throws IOException { if (this.hasOpenEntity()) { throw new IllegalStateException("Entity is already open."); } @@ -205,19 +207,16 @@ public BarjCargoBoundarySource mergeEntity( .fileType(fileType) .encrypted(boundaryMetadata.isEncrypted()); if (fileType != FileType.DIRECTORY) { - if (contentStream == null) { - throw new IllegalArgumentException("Content stream must not be null."); - } - final var boundaries = mergePart(boundaryMetadata.getContentBoundary(), contentStream); - resultBuilder.contentBoundary(boundaries); + doMerge(boundaryMetadata.getContentBoundary(), contentAndMetadataStream, resultBuilder::contentBoundary); } - final var boundaries = mergePart(boundaryMetadata.getMetadataBoundary(), metadataStream); - resultBuilder.metadataBoundary(boundaries); + //merge the remaining part (note: the content part was already read from the input stream) + doMerge(boundaryMetadata.getMetadataBoundary(), contentAndMetadataStream, resultBuilder::metadataBoundary); final var boundarySource = resultBuilder.build(); this.entryCount.incrementAndGet(); this.doOnEntityClosed(getEntityToIndex(boundarySource)); return boundarySource; } finally { + IOUtils.closeQuietly(contentAndMetadataStream); entityLock.unlock(); } } @@ -384,6 +383,21 @@ protected void assertEntityNameIsValidAndUnique( existingEntities.put(archiveEntityPath, fileType); } + private void doMerge( + @NotNull final BarjCargoEntryBoundaries boundary, + @NotNull final InputStream contentAndMetadataStream, + @NotNull final Consumer resultConsumer) throws IOException { + final var start = boundary.getAbsoluteStartIndexInclusive(); + final var length = boundary.getAbsoluteEndIndexExclusive() - start; + final var hash = boundary.getArchivedHash(); + try (var sourceStream = CloseShieldInputStream.wrap(new FixedRangeInputStream(contentAndMetadataStream, 0, length)); + var archivedDataStream = new CompositeRestoreStream(sourceStream, hashAlgorithm, List.of(), hash) + ) { + final var boundaries = mergePart(boundary, archivedDataStream); + resultConsumer.accept(boundaries); + } + } + private BarjCargoEntryBoundaries mergePart( @NotNull final BarjCargoEntryBoundaries boundary, @NonNull final InputStream stream) throws IOException { diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStream.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStream.java index a2b9eeb..b72661f 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStream.java +++ 
b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStream.java @@ -34,18 +34,18 @@ public TempBarjCargoArchiverFileOutputStream( } /** - * Returns an input stream for the given part of the archived entity. + * Returns an input stream for the content and metadata parts of the archived entity. * - * @param boundaries defines the boundaries of the part + * @param content defines the boundaries of the content part + * @param metadata defines the boundaries of the metadata part * @return an input stream * @throws IOException If the input stream cannot be created */ public InputStream getStream( - @NonNull final BarjCargoEntryBoundaries boundaries) throws IOException { - final var start = boundaries.getAbsoluteStartIndexInclusive(); - final var length = boundaries.getAbsoluteEndIndexExclusive() - start; - return new FixedRangeInputStream(new MergingFileInputStream(getDataFilesWritten()), - start, length); + @NonNull final BarjCargoEntryBoundaries content, @NonNull final BarjCargoEntryBoundaries metadata) throws IOException { + final var start = content.getAbsoluteStartIndexInclusive(); + final var length = metadata.getAbsoluteEndIndexExclusive() - start; + return new FixedRangeInputStream(new MergingFileInputStream(getDataFilesWritten()), start, length); } /** diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntityIndex.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntityIndex.java index a7e674a..2c0e34c 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntityIndex.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/internal/model/BarjCargoEntityIndex.java @@ -1,5 +1,6 @@ package com.github.nagyesta.filebarj.io.stream.internal.model; +import com.github.nagyesta.filebarj.io.stream.BarjCargoBoundarySource; import com.github.nagyesta.filebarj.io.stream.enums.FileType; import lombok.Builder; import lombok.Data; @@ -19,7 +20,7 @@ */ @Data @Builder -public class BarjCargoEntityIndex { +public class BarjCargoEntityIndex implements BarjCargoBoundarySource { private static final String PATH = ".path"; private static final String TYPE = ".type"; @@ -86,4 +87,14 @@ private static BarjCargoEntryBoundaries parseBoundary( return null; } } + + @Override + public BarjCargoEntryBoundaries getContentBoundary() { + return content; + } + + @Override + public BarjCargoEntryBoundaries getMetadataBoundary() { + return metadata; + } } diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/BarjCargoArchiveEntry.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/BarjCargoArchiveEntry.java index c7d4d6e..4eac8ab 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/BarjCargoArchiveEntry.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/BarjCargoArchiveEntry.java @@ -54,6 +54,15 @@ public interface BarjCargoArchiveEntry { */ @Nullable String getMetadata(@Nullable SecretKey key) throws IOException; + /** + * Streams the archived content and metadata of an entry without decryption or decompression. 
+ * + * @return the raw content and metadata + * @throws IOException When the content cannot be read + */ + @NotNull + InputStream getRawContentAndMetadata() throws IOException; + /** * Skips the content of an entry. Does nothing if called on a directory entry. * @throws IOException When the content cannot be read through. diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/RandomAccessBarjCargoArchiveEntry.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/RandomAccessBarjCargoArchiveEntry.java index b94f602..33025ed 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/RandomAccessBarjCargoArchiveEntry.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/RandomAccessBarjCargoArchiveEntry.java @@ -2,6 +2,7 @@ import com.github.nagyesta.filebarj.io.stream.BarjCargoArchiveFileInputStreamSource; import com.github.nagyesta.filebarj.io.stream.enums.FileType; +import com.github.nagyesta.filebarj.io.stream.internal.FixedRangeInputStream; import com.github.nagyesta.filebarj.io.stream.internal.model.BarjCargoEntityIndex; import lombok.EqualsAndHashCode; import lombok.NonNull; @@ -69,6 +70,14 @@ public String getMetadata(@Nullable final SecretKey key) throws IOException { } } + @Override + @NotNull + public InputStream getRawContentAndMetadata() throws IOException { + final var start = entityIndex.getContentOrElseMetadata().getAbsoluteStartIndexInclusive(); + final var length = entityIndex.getMetadata().getAbsoluteEndIndexExclusive() - start; + return new FixedRangeInputStream(source.openStreamForSequentialAccess(), start, length); + } + @Override public void skipContent() throws IOException { //noop diff --git a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/SequentialBarjCargoArchiveEntry.java b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/SequentialBarjCargoArchiveEntry.java index b0bdd4c..7cc552e 100644 --- a/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/SequentialBarjCargoArchiveEntry.java +++ b/file-barj-stream-io/src/main/java/com/github/nagyesta/filebarj/io/stream/model/SequentialBarjCargoArchiveEntry.java @@ -3,10 +3,13 @@ import com.github.nagyesta.filebarj.io.stream.BarjCargoArchiveEntryIterator; import com.github.nagyesta.filebarj.io.stream.BarjCargoArchiveFileInputStreamSource; import com.github.nagyesta.filebarj.io.stream.enums.FileType; +import com.github.nagyesta.filebarj.io.stream.internal.FixedRangeInputStream; import com.github.nagyesta.filebarj.io.stream.internal.model.BarjCargoEntityIndex; import lombok.EqualsAndHashCode; +import lombok.Getter; import lombok.NonNull; import lombok.ToString; +import org.apache.commons.io.input.CloseShieldInputStream; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -25,6 +28,7 @@ public class SequentialBarjCargoArchiveEntry implements BarjCargoArchiveEntry { @NonNull private final BarjCargoArchiveFileInputStreamSource source; private final BarjCargoArchiveEntryIterator iterator; + @Getter @NonNull private final BarjCargoEntityIndex entityIndex; @@ -89,6 +93,14 @@ public String getMetadata(@Nullable final SecretKey key) throws IOException { } } + @Override + @NotNull + public InputStream getRawContentAndMetadata() throws IOException { + final var start = entityIndex.getContentOrElseMetadata().getAbsoluteStartIndexInclusive(); + final var length = 
entityIndex.getMetadata().getAbsoluteEndIndexExclusive() - start; + return CloseShieldInputStream.wrap(new FixedRangeInputStream(iterator.getStream(), 0, length)); + } + @Override public void skipContent() throws IOException { if (getFileType() == FileType.DIRECTORY) { diff --git a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/BarjCargoArchiverFileOutputStreamIntegrationTest.java b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/BarjCargoArchiverFileOutputStreamIntegrationTest.java index bbe61ac..00aff62 100644 --- a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/BarjCargoArchiverFileOutputStreamIntegrationTest.java +++ b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/BarjCargoArchiverFileOutputStreamIntegrationTest.java @@ -368,7 +368,6 @@ void testMergeEntityShouldThrowExceptionWhenTheEntryIsAlreadyOpen() Assertions.assertThrows(IllegalStateException.class, () -> underTest.mergeEntity( mock(BarjCargoBoundarySource.class), - InputStream.nullInputStream(), InputStream.nullInputStream())); //then + exception @@ -409,16 +408,12 @@ void testMergeEntityShouldCopyFileContentAndMetadataWhenTheCalledWithValidFileEn final var entity = stream.addFileEntity(key, contentStream, secretKey, metadata); stream.close(); try (var contentFile = new FileInputStream(stream.getDataFilesWritten().get(0).toString()); - var contentArchived = new FixedRangeInputStream(contentFile, + var contentAndMetadataArchived = new FixedRangeInputStream(contentFile, entity.getContentBoundary().getAbsoluteStartIndexInclusive(), - entity.getContentBoundary().getArchivedSizeBytes()); - var metadataFile = new FileInputStream(stream.getDataFilesWritten().get(0).toString()); - var metadataArchived = new FixedRangeInputStream(metadataFile, - entity.getMetadataBoundary().getAbsoluteStartIndexInclusive(), - entity.getMetadataBoundary().getArchivedSizeBytes()); + entity.getContentBoundary().getArchivedSizeBytes() + entity.getMetadataBoundary().getArchivedSizeBytes()); var underTest = new BarjCargoArchiverFileOutputStream(targetConfig)) { //when - underTest.mergeEntity(entity, contentArchived, metadataArchived); + underTest.mergeEntity(entity, contentAndMetadataArchived); //then underTest.close(); @@ -469,7 +464,7 @@ void testMergeEntityShouldCopyDirectoryMetadataWhenTheCalledWithValidDirectoryEn entity.getMetadataBoundary().getArchivedSizeBytes()); var underTest = new BarjCargoArchiverFileOutputStream(targetConfig)) { //when - underTest.mergeEntity(entity, null, metadataArchived); + underTest.mergeEntity(entity, metadataArchived); //then underTest.close(); @@ -480,48 +475,9 @@ void testMergeEntityShouldCopyDirectoryMetadataWhenTheCalledWithValidDirectoryEn } } - @Test - void testMergeEntityShouldThrowExceptionWhenTheCalledWithLinkEntityAndNullContentStream() - throws IOException { - //given - final var sourceConfig = BarjCargoOutputStreamConfiguration.builder() - .prefix("integration-test-source") - .compressionFunction(GZIPOutputStream::new) - .hashAlgorithm("sha-256") - .folder(super.getTestDataRoot()) - .indexEncryptionKey(null) - .build(); - final var targetConfig = BarjCargoOutputStreamConfiguration.builder() - .prefix("integration-test-target") - .compressionFunction(GZIPOutputStream::new) - .hashAlgorithm("sha-256") - .folder(super.getTestDataRoot()) - .indexEncryptionKey(null) - .build(); - final var key = "/key"; - final var target = "target"; - final var metadata = "metadata"; - final var 
secretKey = EncryptionUtil.generateAesKey(); - try (var stream = new BarjCargoArchiverFileOutputStream(sourceConfig)) { - final var entity = stream.addSymbolicLinkEntity(key, target, secretKey, metadata); - stream.close(); - try (var metadataFile = new FileInputStream(stream.getDataFilesWritten().get(0).toString()); - var metadataArchived = new FixedRangeInputStream(metadataFile, - entity.getMetadataBoundary().getAbsoluteStartIndexInclusive(), - entity.getMetadataBoundary().getArchivedSizeBytes()); - var underTest = new BarjCargoArchiverFileOutputStream(targetConfig)) { - //when - Assertions.assertThrows(IllegalArgumentException.class, - () -> underTest.mergeEntity(entity, null, metadataArchived)); - - //then + exception - } - } - } - @SuppressWarnings("DataFlowIssue") @Test - void testMergeEntityShouldThrowExceptionWhenTheCalledWithNullMetadataStream() + void testMergeEntityShouldThrowExceptionWhenTheCalledWithLinkEntityAndNullStream() throws IOException { //given final var sourceConfig = BarjCargoOutputStreamConfiguration.builder() @@ -545,14 +501,10 @@ void testMergeEntityShouldThrowExceptionWhenTheCalledWithNullMetadataStream() try (var stream = new BarjCargoArchiverFileOutputStream(sourceConfig)) { final var entity = stream.addSymbolicLinkEntity(key, target, secretKey, metadata); stream.close(); - try (var contentFile = new FileInputStream(stream.getDataFilesWritten().get(0).toString()); - var contentArchived = new FixedRangeInputStream(contentFile, - entity.getContentBoundary().getAbsoluteStartIndexInclusive(), - entity.getContentBoundary().getArchivedSizeBytes()); - var underTest = new BarjCargoArchiverFileOutputStream(targetConfig)) { + try (var underTest = new BarjCargoArchiverFileOutputStream(targetConfig)) { //when Assertions.assertThrows(IllegalArgumentException.class, - () -> underTest.mergeEntity(entity, contentArchived, null)); + () -> underTest.mergeEntity(entity, null)); //then + exception } @@ -574,8 +526,7 @@ void testMergeEntityShouldThrowExceptionWhenTheCalledWithNullEntity() try (var underTest = new BarjCargoArchiverFileOutputStream(targetConfig)) { //when Assertions.assertThrows(IllegalArgumentException.class, - () -> underTest.mergeEntity(null, - InputStream.nullInputStream(), InputStream.nullInputStream())); + () -> underTest.mergeEntity(null, InputStream.nullInputStream())); //then + exception } @@ -756,7 +707,7 @@ void testWritingExampleFileShouldProduceExpectedOutput() throws IOException, URI stream.close(); //then - final var actualIndexLines = Files.readAllLines(stream.getIndexFile()); + final var actualIndexLines = Files.readAllLines(stream.getIndexFileWritten()); final var expectedIndexLines = Files.readAllLines( Path.of(getClass().getResource("/example/index.properties").toURI())); Assertions.assertIterableEquals(expectedIndexLines, actualIndexLines); diff --git a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/ParallelBarjCargoArchiverFileOutputStreamIntegrationTest.java b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/ParallelBarjCargoArchiverFileOutputStreamIntegrationTest.java index 64496f6..4beab29 100644 --- a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/ParallelBarjCargoArchiverFileOutputStreamIntegrationTest.java +++ b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/ParallelBarjCargoArchiverFileOutputStreamIntegrationTest.java @@ -364,10 +364,7 @@ void 
testMergeEntityShouldThrowExceptionWhenTheEntryIsAlreadyOpen() //when underTest.openEntity("/key1", FileType.REGULAR_FILE, null); Assertions.assertThrows(IllegalStateException.class, - () -> underTest.mergeEntity( - mock(BarjCargoBoundarySource.class), - InputStream.nullInputStream(), - InputStream.nullInputStream())); + () -> underTest.mergeEntity(mock(BarjCargoBoundarySource.class), InputStream.nullInputStream())); //then + exception } @@ -407,16 +404,12 @@ void testMergeEntityShouldCopyFileContentAndMetadataWhenTheCalledWithValidFileEn final var entity = stream.addFileEntity(key, contentStream, secretKey, metadata); stream.close(); try (var contentFile = new FileInputStream(stream.getDataFilesWritten().get(0).toString()); - var contentArchived = new FixedRangeInputStream(contentFile, + var contentAndMetadataArchived = new FixedRangeInputStream(contentFile, entity.getContentBoundary().getAbsoluteStartIndexInclusive(), - entity.getContentBoundary().getArchivedSizeBytes()); - var metadataFile = new FileInputStream(stream.getDataFilesWritten().get(0).toString()); - var metadataArchived = new FixedRangeInputStream(metadataFile, - entity.getMetadataBoundary().getAbsoluteStartIndexInclusive(), - entity.getMetadataBoundary().getArchivedSizeBytes()); + entity.getContentBoundary().getArchivedSizeBytes() + entity.getMetadataBoundary().getArchivedSizeBytes()); var underTest = new ParallelBarjCargoArchiverFileOutputStream(targetConfig, 1)) { //when - underTest.mergeEntity(entity, contentArchived, metadataArchived); + underTest.mergeEntity(entity, contentAndMetadataArchived); //then underTest.close(); @@ -467,7 +460,7 @@ void testMergeEntityShouldCopyDirectoryMetadataWhenTheCalledWithValidDirectoryEn entity.getMetadataBoundary().getArchivedSizeBytes()); var underTest = new ParallelBarjCargoArchiverFileOutputStream(targetConfig, 1)) { //when - underTest.mergeEntity(entity, null, metadataArchived); + underTest.mergeEntity(entity, metadataArchived); //then underTest.close(); @@ -478,48 +471,9 @@ void testMergeEntityShouldCopyDirectoryMetadataWhenTheCalledWithValidDirectoryEn } } - @Test - void testMergeEntityShouldThrowExceptionWhenTheCalledWithLinkEntityAndNullContentStream() - throws IOException { - //given - final var sourceConfig = BarjCargoOutputStreamConfiguration.builder() - .prefix("integration-test-source") - .compressionFunction(GZIPOutputStream::new) - .hashAlgorithm("sha-256") - .folder(super.getTestDataRoot()) - .indexEncryptionKey(null) - .build(); - final var targetConfig = BarjCargoOutputStreamConfiguration.builder() - .prefix("integration-test-target") - .compressionFunction(GZIPOutputStream::new) - .hashAlgorithm("sha-256") - .folder(super.getTestDataRoot()) - .indexEncryptionKey(null) - .build(); - final var key = "/key"; - final var target = "target"; - final var metadata = "metadata"; - final var secretKey = EncryptionUtil.generateAesKey(); - try (var stream = new ParallelBarjCargoArchiverFileOutputStream(sourceConfig, 1)) { - final var entity = stream.addSymbolicLinkEntity(key, target, secretKey, metadata); - stream.close(); - try (var metadataFile = new FileInputStream(stream.getDataFilesWritten().get(0).toString()); - var metadataArchived = new FixedRangeInputStream(metadataFile, - entity.getMetadataBoundary().getAbsoluteStartIndexInclusive(), - entity.getMetadataBoundary().getArchivedSizeBytes()); - var underTest = new ParallelBarjCargoArchiverFileOutputStream(targetConfig, 1)) { - //when - Assertions.assertThrows(IllegalArgumentException.class, - () -> 
underTest.mergeEntity(entity, null, metadataArchived)); - - //then + exception - } - } - } - @SuppressWarnings("DataFlowIssue") @Test - void testMergeEntityShouldThrowExceptionWhenTheCalledWithNullMetadataStream() + void testMergeEntityShouldThrowExceptionWhenTheCalledWithLinkEntityAndNullStream() throws IOException { //given final var sourceConfig = BarjCargoOutputStreamConfiguration.builder() @@ -543,14 +497,10 @@ void testMergeEntityShouldThrowExceptionWhenTheCalledWithNullMetadataStream() try (var stream = new ParallelBarjCargoArchiverFileOutputStream(sourceConfig, 1)) { final var entity = stream.addSymbolicLinkEntity(key, target, secretKey, metadata); stream.close(); - try (var contentFile = new FileInputStream(stream.getDataFilesWritten().get(0).toString()); - var contentArchived = new FixedRangeInputStream(contentFile, - entity.getContentBoundary().getAbsoluteStartIndexInclusive(), - entity.getContentBoundary().getArchivedSizeBytes()); - var underTest = new ParallelBarjCargoArchiverFileOutputStream(targetConfig, 1)) { + try (var underTest = new ParallelBarjCargoArchiverFileOutputStream(targetConfig, 1)) { //when Assertions.assertThrows(IllegalArgumentException.class, - () -> underTest.mergeEntity(entity, contentArchived, null)); + () -> underTest.mergeEntity(entity, null)); //then + exception } @@ -572,8 +522,7 @@ void testMergeEntityShouldThrowExceptionWhenTheCalledWithNullEntity() try (var underTest = new ParallelBarjCargoArchiverFileOutputStream(targetConfig, 1)) { //when Assertions.assertThrows(IllegalArgumentException.class, - () -> underTest.mergeEntity(null, - InputStream.nullInputStream(), InputStream.nullInputStream())); + () -> underTest.mergeEntity(null, InputStream.nullInputStream())); //then + exception } diff --git a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStreamIntegrationTest.java b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStreamIntegrationTest.java index da5bb71..6fe6e54 100644 --- a/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStreamIntegrationTest.java +++ b/file-barj-stream-io/src/test/java/com/github/nagyesta/filebarj/io/stream/internal/TempBarjCargoArchiverFileOutputStreamIntegrationTest.java @@ -6,6 +6,7 @@ import com.github.nagyesta.filebarj.io.stream.BarjCargoInputStreamConfiguration; import com.github.nagyesta.filebarj.io.stream.BarjCargoOutputStreamConfiguration; import com.github.nagyesta.filebarj.io.stream.crypto.EncryptionUtil; +import com.github.nagyesta.filebarj.io.stream.internal.model.BarjCargoEntryBoundaries; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -16,6 +17,8 @@ import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; +import static org.mockito.Mockito.mock; + class TempBarjCargoArchiverFileOutputStreamIntegrationTest extends TempFileAwareTest { @Test @@ -54,8 +57,7 @@ void testMergeEntityShouldCopyFileContentAndMetadataWhenTheCalledWithValidFileEn try (var underTest = new BarjCargoArchiverFileOutputStream(targetConfig)) { //when underTest.mergeEntity(entity, - stream.getStream(entity.getContentBoundary()), - stream.getStream(entity.getMetadataBoundary())); + stream.getStream(entity.getContentBoundary(), entity.getMetadataBoundary())); //then underTest.close(); @@ -95,7 +97,36 @@ void testDeleteTempArchiverShouldDeleteTempFileWhenCalled() @SuppressWarnings("DataFlowIssue") @Test - 
void testGetStreamOfTempArchiverShouldThrowExceptionWhenCalledWithNull() + void testGetStreamOfTempArchiverShouldThrowExceptionWhenCalledWithNullContentBoundary() + throws IOException { + //given + final var sourceConfig = BarjCargoOutputStreamConfiguration.builder() + .prefix("integration-test-source") + .compressionFunction(GZIPOutputStream::new) + .hashAlgorithm("sha-256") + .folder(super.getTestDataRoot()) + .indexEncryptionKey(null) + .build(); + final var key = "/key"; + final var content = "test"; + final var metadata = "metadata"; + final var secretKey = EncryptionUtil.generateAesKey(); + try (var underTest = new TempBarjCargoArchiverFileOutputStream(sourceConfig, key)) { + final var contentStream = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)); + underTest.addFileEntity(key, contentStream, secretKey, metadata); + underTest.close(); + + //when + Assertions.assertThrows(IllegalArgumentException.class, + () -> underTest.getStream(null, mock(BarjCargoEntryBoundaries.class))); + + //then + exception + } + } + + @SuppressWarnings("DataFlowIssue") + @Test + void testGetStreamOfTempArchiverShouldThrowExceptionWhenCalledWithNullMetadataBoundary() throws IOException { //given final var sourceConfig = BarjCargoOutputStreamConfiguration.builder() @@ -115,7 +146,8 @@ void testGetStreamOfTempArchiverShouldThrowExceptionWhenCalledWithNull() underTest.close(); //when - Assertions.assertThrows(IllegalArgumentException.class, () -> underTest.getStream(null)); + Assertions.assertThrows(IllegalArgumentException.class, + () -> underTest.getStream(mock(BarjCargoEntryBoundaries.class), null)); //then + exception }
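For context on how the pieces above fit together: an entry read from a source archive can now hand over its content and metadata as one contiguous raw byte range, and the archiver consumes that single stream when merging. The following is a minimal sketch of that flow, assuming the caller has already obtained a `SequentialBarjCargoArchiveEntry` by iterating a source archive and holds an open target stream; the `RawEntryCopier` class and `copyRawEntry` method names are illustrative, not part of the API.

```java
import com.github.nagyesta.filebarj.io.stream.BarjCargoArchiverFileOutputStream;
import com.github.nagyesta.filebarj.io.stream.model.SequentialBarjCargoArchiveEntry;

import java.io.IOException;

class RawEntryCopier {
    //Sketch: copies one entry into the target archive without decrypting or decompressing it
    static void copyRawEntry(
            final SequentialBarjCargoArchiveEntry sourceEntry,
            final BarjCargoArchiverFileOutputStream target) throws IOException {
        try (var raw = sourceEntry.getRawContentAndMetadata()) {
            //BarjCargoEntityIndex implements BarjCargoBoundarySource, so the index tells
            //mergeEntity where the content part ends and the metadata part begins
            target.mergeEntity(sourceEntry.getEntityIndex(), raw);
        }
    }
}
```

Passing a single combined stream instead of separate content and metadata streams is what lets `getStream` and `getRawContentAndMetadata` serve one contiguous range of the source file, so the merge reads each entry in a single sequential pass instead of reopening the source for each part.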