diff --git a/src/git/http.ts b/src/git/http.ts
index 1f1b7d6b8..3b4f83bf4 100644
--- a/src/git/http.ts
+++ b/src/git/http.ts
@@ -5,6 +5,7 @@ import type {
   ObjectIdList,
 } from './types';
 import type { EncryptedFS } from 'encryptedfs';
+import type { PackObjectsResult } from 'isomorphic-git';
 import { Buffer } from 'buffer';
 import git from 'isomorphic-git';
 import * as gitUtils from './utils';
@@ -398,14 +399,28 @@ async function* generatePackData({
   objectIds: Array;
   chunkSize?: number;
 }): AsyncGenerator {
-  const packFile = await git.packObjects({
-    fs: efs,
-    dir,
-    gitdir: gitDir,
-    oids: objectIds,
-  });
-  if (packFile.packfile == null) utils.never('failed to create packFile data');
-  let packFileBuffer = Buffer.from(packFile.packfile.buffer);
+  let packFile: PackObjectsResult;
+  // We don't want to throw errors here, since any error thrown here would be re-thrown inside `isomorphic-git`
+  // when it consumes the response, and it handles that by logging the error out, which we don't want to happen.
+  try {
+    packFile = await git.packObjects({
+      fs: efs,
+      dir,
+      gitdir: gitDir,
+      oids: objectIds,
+    });
+  } catch {
+    // Return without sending any data
+    return;
+  }
+  // The pack file will only be undefined if it was written to disk instead
+  if (packFile.packfile == null) return;
+  // Convert to a buffer without copying, so we can process it
+  let packFileBuffer = Buffer.from(
+    packFile.packfile.buffer,
+    0,
+    packFile.packfile.byteLength,
+  );
   // Streaming the packFile as chunks of the length specified by the `chunkSize`.
   // Each line is formatted as a `PKT-LINE`
diff --git a/src/git/utils.ts b/src/git/utils.ts
index 8323f4a1f..fef9d9b3a 100644
--- a/src/git/utils.ts
+++ b/src/git/utils.ts
@@ -8,6 +8,7 @@ import type {
   RequestType,
 } from './types';
 import type { EncryptedFS } from 'encryptedfs';
+import path from 'path';
 import git from 'isomorphic-git';
 import { requestTypes } from './types';
 import * as utils from '../utils';
@@ -230,6 +231,35 @@ async function listObjects({
   return [...commits, ...trees, ...blobs, ...tags];
 }
 
+const objectsDirName = 'objects';
+const excludedDirs = ['pack', 'info'];
+
+/**
+ * Walks the filesystem to list out all git objects in the objects directory
+ */
+async function listObjectsAll({
+  fs,
+  gitDir,
+}: {
+  fs: EncryptedFS;
+  gitDir: string;
+}) {
+  const objectsDirPath = path.join(gitDir, objectsDirName);
+  const objectSet: Set = new Set();
+  const objectDirs = await fs.promises.readdir(objectsDirPath);
+  for (const objectDir of objectDirs) {
+    if (typeof objectDir !== 'string') utils.never();
+    if (excludedDirs.includes(objectDir)) continue;
+    const objectIds = await fs.promises.readdir(
+      path.join(objectsDirPath, objectDir),
+    );
+    for (const objectId of objectIds) {
+      objectSet.add(objectDir + objectId);
+    }
+  }
+  return [...objectSet];
+}
+
 /**
  * Parses a want/has line from ref negotiation phase.
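// Illustrative sketch (not part of this changeset): how a chunk of pack data
// can be framed as a PKT-LINE — a 4-character lowercase hex length prefix that
// counts the 4 length bytes plus the payload. The helper name here is
// hypothetical and the real framing in `http.ts` may differ (for example by
// adding a side-band byte); this only sketches the format mentioned above.
function framePktLine(chunk: Buffer): Buffer {
  // The total length includes the 4 bytes of the length header itself
  const length = chunk.byteLength + 4;
  const header = Buffer.from(length.toString(16).padStart(4, '0'), 'utf8');
  return Buffer.concat([header, chunk]);
}
// For example, framePktLine(Buffer.from('hi')) produces the bytes of '0006hi'.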
*/ @@ -309,6 +339,7 @@ export { listReferencesGenerator, referenceCapability, listObjects, + listObjectsAll, parseRequestLine, isObjectId, assertObjectId, diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index 2575144cc..377ae376d 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -33,6 +33,7 @@ import * as vaultsEvents from './events'; import { tagLast } from './types'; import * as ids from '../ids'; import * as nodesUtils from '../nodes/utils'; +import * as gitUtils from '../git/utils'; import * as utils from '../utils'; type RemoteInfo = { @@ -54,6 +55,11 @@ interface VaultInternal extends CreateDestroyStartStop {} }, ) class VaultInternal { + /** + * Creates a VaultInternal. + * If no state already exists then state for the vault is initialized. + * If state already exists then this just creates the `VaultInternal` instance for managing that state. + */ public static async createVaultInternal({ vaultId, vaultName, @@ -106,6 +112,9 @@ class VaultInternal { return vault; } + /** + * Will create a new vault by cloning the vault from a remote node. + */ public static async cloneVaultInternal({ targetNodeId, targetVaultNameOrId, @@ -155,48 +164,33 @@ class VaultInternal { efs, logger, }); - // This error flag will contain the error returned by the cloning rpc stream - let error; // Make the directory where the .git files will be auto generated and // where the contents will be cloned to ('contents' file) await efs.mkdir(vault.vaultDataDir, { recursive: true }); - let vaultName: VaultName; - let remoteVaultId: VaultId; - let remote: RemoteInfo; - try { - [vaultName, remoteVaultId] = await nodeManager.withConnF( - targetNodeId, - async (connection) => { - const client = connection.getClient(); + const [vaultName, remoteVaultId]: [VaultName, VaultId] = + await nodeManager.withConnF(targetNodeId, async (connection) => { + const client = connection.getClient(); - const [request, vaultName, remoteVaultId] = await vault.request( - client, - targetVaultNameOrId, - 'clone', - ); - await git.clone({ - fs: efs, - http: { request }, - dir: vault.vaultDataDir, - gitdir: vault.vaultGitDir, - url: 'http://', - singleBranch: true, - }); - return [vaultName, remoteVaultId]; - }, - ); - remote = { - remoteNode: nodesUtils.encodeNodeId(targetNodeId), - remoteVault: vaultsUtils.encodeVaultId(remoteVaultId), - }; - } catch (e) { - // If the error flag set and we have the generalised SmartHttpError from - // isomorphic git then we need to throw the polykey error - if (e instanceof git.Errors.SmartHttpError && error) { - throw error; - } - throw e; - } + const [request, vaultName, remoteVaultId] = await vault.request( + client, + targetVaultNameOrId, + 'clone', + ); + await git.clone({ + fs: efs, + http: { request }, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + url: 'http://', + singleBranch: true, + ref: vaultsUtils.canonicalBranchRef, + }); + return [vaultName, remoteVaultId]; + }); + const remote: RemoteInfo = { + remoteNode: nodesUtils.encodeNodeId(targetNodeId), + remoteVault: vaultsUtils.encodeVaultId(remoteVaultId), + }; await vault.start({ vaultName, tran }); // Setting the remote in the metadata @@ -281,6 +275,10 @@ class VaultInternal { return await this.start_(fresh, tran, vaultName); } + /** + * We use a protected start method to avoid the `async-init` lifecycle deadlocking when doing the recursive call to + * create a DBTransaction. 
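// Illustrative sketch (not part of this changeset): the shape of the public
// wrapper that pairs with the protected `start_` below — it only creates a new
// transaction when one was not passed in, which is what keeps the `async-init`
// decorated lifecycle from re-entering itself. The signature is simplified and
// `withTransactionF` is assumed from the generator variant (`withTransactionG`)
// used elsewhere in this changeset.
public async start({
  fresh = false,
  vaultName,
  tran,
}: {
  fresh?: boolean;
  vaultName?: VaultName;
  tran?: DBTransaction;
} = {}): Promise<void> {
  if (tran == null) {
    return await this.db.withTransactionF((tran) =>
      this.start_(fresh, tran, vaultName),
    );
  }
  return await this.start_(fresh, tran, vaultName);
}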
+ */ protected async start_( fresh: boolean, tran: DBTransaction, @@ -291,7 +289,6 @@ class VaultInternal { ); this.vaultMetadataDbPath = [...this.vaultsDbPath, this.vaultIdEncoded]; this.vaultsNamesPath = [...this.vaultsDbPath, 'names']; - // Let's backup any metadata if (fresh) { await tran.clear(this.vaultMetadataDbPath); try { @@ -304,9 +301,9 @@ class VaultInternal { } } } - await this.mkdirExists(this.vaultIdEncoded); - await this.mkdirExists(this.vaultDataDir); - await this.mkdirExists(this.vaultGitDir); + await vaultsUtils.mkdirExists(this.efs, this.vaultIdEncoded); + await vaultsUtils.mkdirExists(this.efs, this.vaultDataDir); + await vaultsUtils.mkdirExists(this.efs, this.vaultGitDir); await this.setupMeta({ vaultName, tran }); await this.setupGit(tran); this.efsVault = await this.efs.chroot(this.vaultDataDir); @@ -315,16 +312,6 @@ class VaultInternal { ); } - protected async mkdirExists(directory: string) { - try { - await this.efs.mkdir(directory, { recursive: true }); - } catch (e) { - if (e.code !== 'EEXIST') { - throw e; - } - } - } - public async stop(): Promise { this.logger.info( `Stopping ${this.constructor.name} - ${this.vaultIdEncoded}`, @@ -341,6 +328,10 @@ class VaultInternal { return await this.destroy_(tran); } + /** + * We use a protected destroy method to avoid the `async-init` lifecycle deadlocking when doing the recursive call to + * create a DBTransaction. + */ protected async destroy_(tran: DBTransaction) { this.logger.info( `Destroying ${this.constructor.name} - ${this.vaultIdEncoded}`, @@ -364,9 +355,7 @@ class VaultInternal { ref: string | VaultRef = 'HEAD', limit?: number, ): Promise> { - if (!vaultsUtils.validateRef(ref)) { - throw new vaultsErrors.ErrorVaultReferenceInvalid(); - } + vaultsUtils.assertRef(ref); if (ref === vaultsUtils.tagLast) { ref = vaultsUtils.canonicalBranch; } @@ -400,9 +389,7 @@ class VaultInternal { */ @ready(new vaultsErrors.ErrorVaultNotRunning()) public async version(ref: string | VaultRef = tagLast): Promise { - if (!vaultsUtils.validateRef(ref)) { - throw new vaultsErrors.ErrorVaultReferenceInvalid(); - } + vaultsUtils.assertRef(ref); if (ref === vaultsUtils.tagLast) { ref = vaultsUtils.canonicalBranch; } @@ -427,6 +414,9 @@ class VaultInternal { } } + /** + * With context handler for using a vault in a read-only context. + */ @ready(new vaultsErrors.ErrorVaultNotRunning()) public async readF(f: (fs: FileSystemReadable) => Promise): Promise { return withF([this.lock.read()], async () => { @@ -434,6 +424,9 @@ class VaultInternal { }); } + /** + * With context handler for using a vault in a read-only context for a generator. + */ @ready(new vaultsErrors.ErrorVaultNotRunning()) public readG( g: (fs: FileSystemReadable) => AsyncGenerator, @@ -444,6 +437,9 @@ class VaultInternal { }); } + /** + * With context handler for using a vault in a writable context. + */ @ready(new vaultsErrors.ErrorVaultNotRunning()) public async writeF( f: (fs: FileSystemWritable) => Promise, @@ -491,6 +487,9 @@ class VaultInternal { }); } + /** + * With context handler for using a vault in a writable context for a generator. 
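// Illustrative sketch (not part of this changeset): how the read/write context
// handlers documented above are consumed — the vault only exposes its
// EncryptedFS view inside the callback, while locking and committing happen
// around it:
await vault.writeF(async (efs) => {
  await efs.writeFile('secret-1', 'content-1');
});
const data = await vault.readF(async (efs) => {
  return (await efs.readFile('secret-1')).toString();
});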
+   */
  @ready(new vaultsErrors.ErrorVaultNotRunning())
  public writeG(
    g: (fs: FileSystemWritable) => AsyncGenerator,
  ): AsyncGenerator {
@@ -517,7 +516,7 @@
       );
       await tran.put([...vaultMetadataDbPath, VaultInternal.dirtyKey], true);
-      let result;
+      let result: TReturn;
       // Do what you need to do here, create the commit
       try {
         result = yield* g(efsVault);
@@ -537,6 +536,10 @@
     });
   }
 
+  /**
+   * Pulls changes into the vault from the vault's default remote.
+   * If `pullNodeId` and `pullVaultNameOrId` are provided then they are used as the remote instead.
+   */
   @ready(new vaultsErrors.ErrorVaultNotRunning())
   public async pullVault({
     nodeManager,
@@ -560,10 +563,8 @@
       );
     }
 
-    // This error flag will contain the error returned by the cloning rpc stream
-    let error;
     // Keeps track of whether the metadata needs changing to avoid unnecessary db ops
-    // 0 = no change, 1 = change with vault Id, 2 = change with vault name
+    // 0 = no change, 1 = change with vault ID, 2 = change with vault name
     let metaChange = 0;
     const remoteInfo = await tran.get([
       ...this.vaultMetadataDbPath,
@@ -610,8 +611,10 @@
             dir: this.vaultDataDir,
             gitdir: this.vaultGitDir,
             url: `http://`,
-            ref: 'HEAD',
+            ref: vaultsUtils.canonicalBranchRef,
             singleBranch: true,
+            fastForward: true,
+            fastForwardOnly: true,
             author: {
               name: nodesUtils.encodeNodeId(pullNodeId!),
             },
@@ -620,17 +623,15 @@
           return remoteVaultId;
         },
       );
-    } catch (err) {
-      // If the error flag set and we have the generalised SmartHttpError from
-      // isomorphic git then we need to throw the polykey error
-      if (err instanceof git.Errors.SmartHttpError && error) {
-        throw error;
-      } else if (err instanceof git.Errors.MergeNotSupportedError) {
-        throw new vaultsErrors.ErrorVaultsMergeConflict(err.message, {
-          cause: err,
+    } catch (e) {
+      // `isomorphic-git` only reports a merge conflict with its generic
+      // `MergeNotSupportedError`, so we translate it into the Polykey error
+      if (e instanceof git.Errors.MergeNotSupportedError) {
+        throw new vaultsErrors.ErrorVaultsMergeConflict(e.message, {
+          cause: e,
         });
       }
-      throw err;
+      throw e;
     }
     if (metaChange !== 0) {
       if (metaChange === 2) {
@@ -649,7 +650,9 @@
   }
 
   /**
-   * Setup the vault metadata
+   * Sets up the vault metadata.
+   * Creates a `dirty` boolean in the database to track the dirty state of the vault.
+   * Also adds the vault's name to the database.
    */
   protected async setupMeta({
     vaultName,
@@ -658,14 +661,7 @@
     vaultName?: VaultName;
     tran: DBTransaction;
   }): Promise {
-    // Setup the vault metadata
-    // and you need to make certain preparations
-    // the meta gets created first
-    // if the SoT is the database
-    // are we supposed to check this?
-
-    // If this is not existing
-    // setup default vaults db
+    // Set up the dirty key, defaulting to false
     if (
       (await tran.get([
         ...this.vaultMetadataDbPath,
@@ -692,11 +688,15 @@
       );
     }
 
-    // Remote: [NodeId, VaultId] | undefined
-    // dirty: boolean
+    // Dirty: boolean
     // name: string | undefined
   }
 
+  /**
+   * Does an idempotent initialization of the git repository for the vault.
+   * If the vault is in a dirty state then we clean up the working directory
+   * and drop any history that is not part of the canonicalBranch.
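// Illustrative sketch (not part of this changeset): the dirty-state recovery
// described above and implemented by `setupGit` below, reduced to its core
// sequence. The control flow is simplified; the key and method names are the
// ones used elsewhere in this class.
const dirty = await tran.get([
  ...this.vaultMetadataDbPath,
  VaultInternal.dirtyKey,
]);
if (dirty === true) {
  // Drop any uncommitted changes in the working directory
  await this.cleanWorkingDirectory();
  // Delete objects that are no longer reachable from the canonical branch
  await this.garbageCollectGitObjectsGlobal();
  // Mark the vault as clean again
  await tran.put([...this.vaultMetadataDbPath, VaultInternal.dirtyKey], false);
}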
+ */ protected async setupGit(tran: DBTransaction): Promise { // Initialization is idempotent // It works even with an existing git repository @@ -754,7 +754,7 @@ class VaultInternal { // This ensures that any uncommitted state is dropped await this.cleanWorkingDirectory(); // Do global GC operation - await this.garbageCollectGitObjects(); + await this.garbageCollectGitObjectsGlobal(); // Setting dirty back to false await tran.put( @@ -817,15 +817,17 @@ class VaultInternal { action: vaultAction, }); const result = vaultsGitInfoGetStream.meta?.result; - if (result == null || !utils.isObject(result)) utils.never(); - if (!('vaultName' in result) || typeof result.vaultName != 'string') { - utils.never(); + if (result == null || !utils.isObject(result)) { + utils.never('`result` must be a defined object'); + } + if (!('vaultName' in result) || typeof result.vaultName !== 'string') { + utils.never('`vaultName` must be defined and a string'); } if ( !('vaultIdEncoded' in result) || - typeof result.vaultIdEncoded != 'string' + typeof result.vaultIdEncoded !== 'string' ) { - utils.never(); + utils.never('`vaultIdEncoded` must be defined and a string'); } const vaultName = result.vaultName; const remoteVaultId = ids.parseVaultId(result.vaultIdEncoded); @@ -879,7 +881,10 @@ class VaultInternal { } /** - * Creates a commit while moving the canonicalBranch reference + * Creates a commit while moving the canonicalBranch reference to that new commit. + * If the commit creates a branch from the canonical history. Then the new commit becomes the new canonical history + * and the old history is removed from the old canonical head to the branch point. This is to maintain the strict + * non-branching linear history. */ protected async createCommit() { // Checking if commit is appending or branching @@ -900,78 +905,71 @@ class VaultInternal { const message: string[] = []; // Get the status of each file in the working directory // https://isomorphic-git.org/docs/en/statusMatrix + await git.add({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: '.', + }); const statusMatrix = await git.statusMatrix({ fs: this.efs, dir: this.vaultDataDir, gitdir: this.vaultGitDir, }); - for (let [ + for (const [ filePath, HEADStatus, workingDirStatus, stageStatus, ] of statusMatrix) { - // Reset the index of files that are marked as 'unmodified' - // The working directory, HEAD and staging area are all the same - // https://github.com/MatrixAI/js-polykey/issues/260 - if (HEADStatus === workingDirStatus && workingDirStatus === stageStatus) { - await git.resetIndex({ - fs: this.efs, - dir: this.vaultDataDir, - gitdir: this.vaultGitDir, - filepath: filePath, - }); - // Check if the file is still 'unmodified' and leave - // it out of the commit if it is - [filePath, HEADStatus, workingDirStatus, stageStatus] = ( - await git.statusMatrix({ - fs: this.efs, - dir: this.vaultDataDir, - gitdir: this.vaultGitDir, - filepaths: [filePath], - }) - ).pop()!; - if ( - HEADStatus === workingDirStatus && - workingDirStatus === stageStatus - ) { - continue; - } - } - // We want files in the working directory that are both different - // from the head commit and the staged changes - // If working directory and stage status are not equal then filepath has un-staged - // changes in the working directory relative to both the HEAD and staging - // area that need to be added - // https://isomorphic-git.org/docs/en/statusMatrix - if (workingDirStatus !== stageStatus) { - let status: 'added' | 'modified' | 'deleted'; - // 
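// Illustrative sketch (not part of this changeset): the switch below keys on
// the three status numbers joined into a string. A standalone version of that
// mapping — returning null for rows that should not occur once everything has
// been staged with `git.add({ filepath: '.' })` — could look like this
// (the function name is hypothetical):
type StatusRow = [string, 0 | 1, 0 | 1 | 2, 0 | 1 | 2 | 3];
function describeChange([filePath, head, workdir, stage]: StatusRow): string | null {
  switch (`${head}${workdir}${stage}`) {
    case '022':
      return `${filePath} added`;
    case '122':
      return `${filePath} modified`;
    case '101':
    case '100':
      return `${filePath} deleted`;
    case '111':
      return null; // Unmodified
    default:
      return null; // Unexpected once everything has been staged
  }
}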
If the working directory status is 0 then the file has - // been deleted - if (workingDirStatus === 0) { - status = 'deleted'; + /* + Type StatusRow = [Filename, HeadStatus, WorkdirStatus, StageStatus] + The HeadStatus status is either absent (0) or present (1). + The WorkdirStatus status is either absent (0), identical to HEAD (1), or different from HEAD (2). + The StageStatus status is either absent (0), identical to HEAD (1), identical to WORKDIR (2), or different from WORKDIR (3). + + ```js + // example StatusMatrix + [ + ["a.txt", 0, 2, 0], // new, untracked + ["b.txt", 0, 2, 2], // added, staged + ["c.txt", 0, 2, 3], // added, staged, with unstaged changes + ["d.txt", 1, 1, 1], // unmodified + ["e.txt", 1, 2, 1], // modified, unstaged + ["f.txt", 1, 2, 2], // modified, staged + ["g.txt", 1, 2, 3], // modified, staged, with unstaged changes + ["h.txt", 1, 0, 1], // deleted, unstaged + ["i.txt", 1, 0, 0], // deleted, staged + ] + ``` + */ + const status = `${HEADStatus}${workingDirStatus}${stageStatus}`; + switch (status) { + case '022': // Added, staged + message.push(`${filePath} added`); + break; + case '111': // Unmodified + break; + case '122': // Modified, staged + message.push(`${filePath} modified`); + break; + case '101': // Deleted, unStaged + // need to stage the deletion with remove await git.remove({ fs: this.efs, dir: this.vaultDataDir, gitdir: this.vaultGitDir, filepath: filePath, }); - } else { - await git.add({ - fs: this.efs, - dir: this.vaultDataDir, - gitdir: this.vaultGitDir, - filepath: filePath, - }); - // Check whether the file already exists inside the HEAD - // commit and if it does then it is unmodified - if (HEADStatus === 1) { - status = 'modified'; - } else { - status = 'added'; - } - } - message.push(`${filePath} ${status}`); + // Fall through + case '100': // Deleted, staged + message.push(`${filePath} deleted`); + break; + default: + // We don't handle untracked and partially staged files since we add all files to staging before processing + utils.never( + `Status ${status} is unhandled because it was unexpected state`, + ); } } // Skip commit if no changes were made @@ -998,33 +996,15 @@ class VaultInternal { }); // We clean old history if a commit was made on previous version if (headRef !== masterRef) { - // Delete old commits following chain from masterRef -> headRef - let currentRef = masterRef; - while (currentRef !== headRef) { - // Read commit info - const commit = await git.readCommit({ - fs: this.efs, - dir: this.vaultDataDir, - gitdir: this.vaultGitDir, - oid: currentRef, - }); - // Delete commit - await vaultsUtils.deleteObject( - this.efs, - this.vaultGitDir, - commit.oid, - ); - // Getting new ref - const nextRef = commit.commit.parent.pop(); - if (nextRef == null) break; - currentRef = nextRef; - } + await this.garbageCollectGitObjectsLocal(masterRef, headRef); } } } /** - * Cleans the git working directory by checking out the canonicalBranch + * Cleans the git working directory by checking out the canonicalBranch. + * This will remove any un-committed changes since any untracked or modified files outside a commit is dirty state. + * Dirty state should only happen if the usual commit procedure was interrupted ungracefully. 
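// Illustrative sketch (not part of this changeset): discarding dirty working
// directory state can be done with a forced checkout of the canonical branch.
// Whether `cleanWorkingDirectory` is implemented exactly this way is an
// assumption; this only illustrates the behaviour described in the comment
// above.
await git.checkout({
  fs: this.efs,
  dir: this.vaultDataDir,
  gitdir: this.vaultGitDir,
  ref: vaultsUtils.canonicalBranch,
  force: true, // Overwrite modified or un-staged files in the working tree
});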
*/ protected async cleanWorkingDirectory() { // Check the status matrix for any un-staged file changes @@ -1064,69 +1044,65 @@ class VaultInternal { } /** - * Deletes any git objects that can't be reached from the canonicalBranch + * This will walk the current canonicalBranch history and delete any objects that are not a part of it. + * This is costly since it will compare the walked tree with all existing objects. */ - protected async garbageCollectGitObjects() { - // To garbage collect the git objects, - // we need to walk all objects connected to the master branch - // and delete the object files that are not touched by this walk - const touchedOids = {}; + protected async garbageCollectGitObjectsGlobal() { + const objectIdsAll = await gitUtils.listObjectsAll({ + fs: this.efs, + gitDir: this.vaultGitDir, + }); + const objects = new Set(objectIdsAll); const masterRef = await git.resolveRef({ fs: this.efs, dir: this.vaultDataDir, gitdir: this.vaultGitDir, ref: vaultsUtils.canonicalBranch, }); - const queuedOids: string[] = [masterRef]; - while (queuedOids.length > 0) { - const currentOid = queuedOids.shift()!; - if (touchedOids[currentOid] === null) continue; - const result = await git.readObject({ - fs: this.efs, - dir: this.vaultDataDir, - gitdir: this.vaultGitDir, - oid: currentOid, - }); - touchedOids[result.oid] = result.type; - if (result.format !== 'parsed') continue; - switch (result.type) { - case 'commit': - { - const object = result.object; - queuedOids.push(...object.parent); - queuedOids.push(object.tree); - } - break; - case 'tree': - { - const object = result.object; - for (const item of object) { - touchedOids[item.oid] = item.type; - } - } - break; - default: { - utils.never(); - } - } + const reachableObjects = await gitUtils.listObjects({ + efs: this.efs, + dir: this.vaultDataDir, + gitDir: this.vaultGitDir, + wants: [masterRef], + haves: [], + }); + // Walk from head to all reachable objects + for (const objectReachable of reachableObjects) { + objects.delete(objectReachable); + } + // Any objects left in `objects` was unreachable, thus they are a part of orphaned branches + // So we want to delete them. + const deletePs: Array> = []; + for (const objectId of objects) { + deletePs.push( + vaultsUtils.deleteObject(this.efs, this.vaultGitDir, objectId), + ); } - // Walking all objects - const objectPath = path.join(this.vaultGitDir, 'objects'); - const buckets = (await this.efs.readdir(objectPath)).filter((item) => { - return item !== 'info' && item !== 'pack'; + await Promise.all(deletePs); + } + + /** + * This will walk from the `startId` to the `StopId` deleting objects as it goes. + * This is smarter since it only walks over the old history and not everything. 
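// Illustrative note (not part of this changeset): `listObjects` follows git's
// negotiation semantics — it walks everything reachable from `wants` but stops
// at anything reachable from `haves`. For the local GC below, with history
//
//   A -- B -- C   (stopId: the new canonical head)
//         \
//          D -- E (startId: the old, now orphaned head)
//
// the call only returns the objects introduced by D and E, which are exactly
// the ones that are safe to delete:
const orphanedObjects = await gitUtils.listObjects({
  efs: this.efs,
  dir: this.vaultDataDir,
  gitDir: this.vaultGitDir,
  wants: [startId], // The old orphaned head
  haves: [stopId], // The new canonical head acts as the walk boundary
});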
+ */ + protected async garbageCollectGitObjectsLocal( + startId: string, + stopId: string, + ) { + const objects = await gitUtils.listObjects({ + efs: this.efs, + dir: this.vaultDataDir, + gitDir: this.vaultGitDir, + wants: [startId], + haves: [stopId], }); - for (const bucket of buckets) { - const bucketPath = path.join(objectPath, bucket.toString()); - const oids = await this.efs.readdir(bucketPath); - for (const shortOid of oids) { - const oidPath = path.join(bucketPath, shortOid.toString()); - const oid = bucket.toString() + shortOid.toString(); - if (touchedOids[oid] === undefined) { - // Removing unused objects - await this.efs.unlink(oidPath); - } - } + const deletePs: Array> = []; + for (const objectId of objects) { + deletePs.push( + vaultsUtils.deleteObject(this.efs, this.vaultGitDir, objectId), + ); } + await Promise.all(deletePs); } } diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 22f45305e..4618661b2 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -806,7 +806,8 @@ class VaultManager { tran?: DBTransaction, ): AsyncGenerator { if (tran == null) { - const handleInfoRequest = (tran) => this.handleInfoRequest(vaultId, tran); + const handleInfoRequest = (tran: DBTransaction) => + this.handleInfoRequest(vaultId, tran); return yield* this.db.withTransactionG(async function* (tran) { return yield* handleInfoRequest(tran); }); @@ -907,7 +908,8 @@ class VaultManager { }> { if (tran == null) { // Lambda to maintain `this` context - const handleScanVaults = (tran) => this.handleScanVaults(nodeId, tran); + const handleScanVaults = (tran: DBTransaction) => + this.handleScanVaults(nodeId, tran); return yield* this.db.withTransactionG(async function* (tran) { return yield* handleScanVaults(tran); }); diff --git a/src/vaults/VaultOps.ts b/src/vaults/VaultOps.ts index ac6a7a75c..abd931429 100644 --- a/src/vaults/VaultOps.ts +++ b/src/vaults/VaultOps.ts @@ -12,13 +12,6 @@ type FileOptions = { recursive?: boolean; }; -// TODO: tests -// - add succeeded -// - secret exists -// - secret with directory -// Might just drop the return type -// I don't see a case where it would be false without an error -// - Add locking? 
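// Illustrative sketch (not part of this changeset): typical usage of these
// VaultOps helpers, assuming `vault` is a started Vault instance and this
// module is imported as `vaultOps`:
await vaultOps.addSecret(vault, 'dir1/secret-1', 'my-secret-content');
const content = await vaultOps.getSecret(vault, 'dir1/secret-1');
await vaultOps.renameSecret(vault, 'dir1/secret-1', 'dir1/secret-2');
await vaultOps.deleteSecret(vault, 'dir1/secret-2');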
async function addSecret( vault: Vault, secretName: string, @@ -33,12 +26,7 @@ async function addSecret( } // Create the directory to the secret if it doesn't exist - try { - await efs.mkdir(path.dirname(secretName), { recursive: true }); - } catch (err) { - if (err.code !== 'EEXIST') throw err; - } - + await vaultsUtils.mkdirExists(efs, path.dirname(secretName)); // Write the secret into the vault await efs.writeFile(secretName, content); }); @@ -49,9 +37,6 @@ async function addSecret( /** * Changes the contents of a secret */ -// TODO: tests -// - updates -// - invalid name async function updateSecret( vault: Vault, secretName: string, @@ -76,89 +61,105 @@ async function updateSecret( /** * Changes the name of a secret in a vault */ -// Todo: tests -// - Valid name -// - invalid name async function renameSecret( vault: Vault, - currstring: string, - newstring: string, + secretName: string, + secretNameNew: string, logger?: Logger, ): Promise { await vault.writeF(async (efs) => { - await efs.rename(currstring, newstring); + if (!(await efs.exists(secretName))) { + throw new vaultsErrors.ErrorSecretsSecretUndefined( + 'Secret does not exist, can not rename', + ); + } + await efs.rename(secretName, secretNameNew); }); logger?.info( - `Renamed secret at ${currstring} to ${newstring} in vault ${vault.vaultId}`, + `Renamed secret at ${secretName} to ${secretNameNew} in vault ${vault.vaultId}`, ); } /** * Returns the contents of a secret */ -// TODO: tests -// - read existing file -// - try to read non-existent file -// - read directory? async function getSecret(vault: Vault, secretName: string): Promise { try { return await vault.readF(async (efs) => { return (await efs.readFile(secretName)) as Buffer; }); - } catch (err) { - if (err.code === 'ENOENT') { + } catch (e) { + if (e.code === 'ENOENT') { throw new vaultsErrors.ErrorSecretsSecretUndefined( `Secret with name: ${secretName} does not exist`, - { cause: err }, + { cause: e }, ); } - throw err; + if (e.code === 'EISDIR') { + throw new vaultsErrors.ErrorSecretsIsDirectory( + `${secretName} is a directory and not a secret`, + { cause: e }, + ); + } + throw e; } } +/** + * Returns the file stats of a secret + */ async function statSecret(vault: Vault, secretName: string): Promise { try { return await vault.readF(async (efs) => { return await efs.stat(secretName); }); - } catch (err) { - if (err.code === 'ENOENT') { + } catch (e) { + if (e.code === 'ENOENT') { throw new vaultsErrors.ErrorSecretsSecretUndefined( `Secret with name: ${secretName} does not exist`, - { cause: err }, + { cause: e }, ); } - throw err; + throw e; } } /** * Removes a secret from a vault */ -// TODO: tests -// - delete a secret -// - Secret doesn't exist -// - delete a full and empty directory with and without recursive async function deleteSecret( vault: Vault, secretName: string, fileOptions?: FileOptions, logger?: Logger, ): Promise { - await vault.writeF(async (efs) => { - if ((await efs.stat(secretName)).isDirectory()) { - await efs.rmdir(secretName, fileOptions); - logger?.info(`Deleted directory at '${secretName}'`); - } else if (await efs.exists(secretName)) { - // Remove the specified file - await efs.unlink(secretName); - logger?.info(`Deleted secret at '${secretName}'`); - } else { + try { + await vault.writeF(async (efs) => { + const stat = await efs.stat(secretName); + if (stat.isDirectory()) { + await efs.rmdir(secretName, fileOptions); + logger?.info(`Deleted directory at '${secretName}'`); + } else { + // Remove the specified file + await 
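// Illustrative sketch (not part of this changeset): the error-translation
// pattern these helpers use — let the EncryptedFS call fail and map its error
// code onto a domain error. `someEfsOperation` is a placeholder:
try {
  await someEfsOperation(secretName);
} catch (e) {
  if (e.code === 'ENOENT') {
    throw new vaultsErrors.ErrorSecretsSecretUndefined(
      `Secret with name: ${secretName} does not exist`,
      { cause: e },
    );
  }
  throw e;
}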
efs.unlink(secretName); + logger?.info(`Deleted secret at '${secretName}'`); + } + }); + } catch (e) { + if (e.code === 'ENOENT') { throw new vaultsErrors.ErrorSecretsSecretUndefined( - `path '${secretName}' does not exist in vault`, + `Secret with name: ${secretName} does not exist`, + { cause: e }, ); } - }); + if (e.code === 'ENOTEMPTY') { + throw new vaultsErrors.ErrorVaultsRecursive( + `Could not delete directory '${secretName}' without recursive option`, + { cause: e }, + ); + } + throw e; + } } /** @@ -171,18 +172,25 @@ async function mkdir( fileOptions?: FileOptions, logger?: Logger, ): Promise { - const recursive = !!fileOptions?.recursive; + const recursive = fileOptions?.recursive ?? false; await vault.writeF(async (efs) => { try { await efs.mkdir(dirPath, fileOptions); - } catch (err) { - if (err.code === 'ENOENT' && !recursive) { + } catch (e) { + if (e.code === 'ENOENT' && !recursive) { throw new vaultsErrors.ErrorVaultsRecursive( `Could not create directory '${dirPath}' without recursive option`, - { cause: err }, + { cause: e }, ); } + if (e.code === 'EEXIST') { + throw new vaultsErrors.ErrorSecretsSecretDefined( + `${dirPath} already exists`, + { cause: e }, + ); + } + throw e; } logger?.info(`Created secret directory at '${dirPath}'`); }); @@ -218,26 +226,22 @@ async function addSecretDirectory( // Write secret into vault await efs.writeFile(relPath, content); logger?.info(`Added secret at directory '${relPath}'`); - } catch (err) { + } catch (e) { // Warn of a failed addition but continue operation logger?.warn(`Adding secret ${relPath} failed`); - throw err; + throw e; } } else { try { // Create directory if it doesn't exist - try { - await efs.mkdir(path.dirname(relPath), { recursive: true }); - } catch (err) { - if (err.code !== 'EEXIST') throw err; - } + await vaultsUtils.mkdirExists(efs, path.dirname(relPath)); // Write secret into vault await efs.writeFile(relPath, content, {}); logger?.info(`Added secret to directory at '${relPath}'`); - } catch (err) { + } catch (e) { // Warn of a failed addition but continue operation logger?.warn(`Adding secret ${relPath} failed`); - throw err; + throw e; } } } @@ -247,9 +251,6 @@ async function addSecretDirectory( /** * Retrieves a list of the secrets in a vault */ -// TODO: tests -// - read secrets -// - no secrets async function listSecrets(vault: Vault): Promise { return await vault.readF(async (efs) => { const secrets: string[] = []; diff --git a/src/vaults/errors.ts b/src/vaults/errors.ts index 7e184255c..b887846c4 100644 --- a/src/vaults/errors.ts +++ b/src/vaults/errors.ts @@ -113,6 +113,11 @@ class ErrorSecretsSecretDefined extends ErrorSecrets { exitCode = sysexits.USAGE; } +class ErrorSecretsIsDirectory extends ErrorSecrets { + static description = 'Is a directory and not a secret'; + exitCode = sysexits.USAGE; +} + export { ErrorVaults, ErrorVaultManagerRunning, @@ -138,4 +143,5 @@ export { ErrorSecrets, ErrorSecretsSecretUndefined, ErrorSecretsSecretDefined, + ErrorSecretsIsDirectory, }; diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts index 53397c8f8..449cd4adb 100644 --- a/src/vaults/utils.ts +++ b/src/vaults/utils.ts @@ -4,11 +4,13 @@ import type { VaultAction, CommitId, FileSystemReadable, + FileSystemWritable, } from './types'; import type { NodeId } from '../ids/types'; import type { Path } from 'encryptedfs/dist/types'; import path from 'path'; import { pathJoin } from 'encryptedfs/dist/utils'; +import * as vaultsErrors from './errors'; import { tagLast, refs, vaultActions } from './types'; import * as 
nodesUtils from '../nodes/utils'; import * as validationErrors from '../validation/errors'; @@ -29,6 +31,12 @@ function validateRef(ref: any): ref is VaultRef { return refs.includes(ref) || validateCommitId(ref); } +function assertRef(ref: any): asserts ref is VaultRef { + if (!validateRef(ref)) { + throw new vaultsErrors.ErrorVaultReferenceInvalid(); + } +} + /** * Commit ids are SHA1 hashes encoded as 40-character long lowercase hexadecimal strings */ @@ -44,12 +52,12 @@ function commitAuthor(nodeId: NodeId): { name: string; email: string } { } async function* readDirRecursively( - fs: FileSystemReadable, + fs, dir = '.', ): AsyncGenerator { - const dirents = await fs.promises.readdir(dir); - for (const dirent of dirents) { - const res = path.join(dir, dirent.toString()); + const dirEntries = await fs.promises.readdir(dir); + for (const dirEntry of dirEntries) { + const res = path.join(dir, dirEntry.toString()); const stat = await fs.promises.stat(res); if (stat.isDirectory()) { yield* readDirRecursively(fs, res); @@ -96,8 +104,8 @@ function parseVaultAction(data: any): VaultAction { async function deleteObject(fs: EncryptedFS, gitdir: string, ref: string) { const bucket = ref.slice(0, 2); - const shortref = ref.slice(2); - const objectPath = path.join(gitdir, 'objects', bucket, shortref); + const shortRef = ref.slice(2); + const objectPath = path.join(gitdir, 'objects', bucket, shortRef); try { await fs.unlink(objectPath); } catch (e) { @@ -105,12 +113,23 @@ async function deleteObject(fs: EncryptedFS, gitdir: string, ref: string) { } } +async function mkdirExists(efs: FileSystemWritable, directory: string) { + try { + await efs.mkdir(directory, { recursive: true }); + } catch (e) { + if (e.code !== 'EEXIST') { + throw e; + } + } +} + export { tagLast, refs, canonicalBranch, canonicalBranchRef, validateRef, + assertRef, validateCommitId, commitAuthor, isVaultAction, @@ -118,6 +137,7 @@ export { readDirRecursively, walkFs, deleteObject, + mkdirExists, }; export { createVaultIdGenerator, encodeVaultId, decodeVaultId } from '../ids'; diff --git a/tests/git/http.test.ts b/tests/git/http.test.ts index 898a4ab63..702292f7b 100644 --- a/tests/git/http.test.ts +++ b/tests/git/http.test.ts @@ -5,6 +5,7 @@ import git from 'isomorphic-git'; import { test } from '@fast-check/jest'; import fc from 'fast-check'; import * as gitHttp from '@/git/http'; +import * as gitUtils from '@/git/utils'; import * as validationErrors from '@/validation/errors'; import * as gitTestUtils from './utils'; @@ -191,7 +192,7 @@ describe('Git Http', () => { }, ], }); - const objectIds = await gitTestUtils.listGitObjects(gitDirs); + const objectIds = await gitUtils.listObjectsAll(gitDirs); const gen = gitHttp.generatePackData({ ...gitDirs, objectIds, diff --git a/tests/git/utils.test.ts b/tests/git/utils.test.ts index d09bdd1e5..f981cb238 100644 --- a/tests/git/utils.test.ts +++ b/tests/git/utils.test.ts @@ -183,7 +183,7 @@ describe('Git utils', () => { wants: commitIds, haves: [], }); - const expectedObjectIds = await gitTestUtils.listGitObjects(gitDirs); + const expectedObjectIds = await gitUtils.listObjectsAll(gitDirs); // Found objects should include all the commits expect(objectList).toIncludeAllMembers(commitIds); // Since it was an exhaustive walk of all commits, all objectIds should be included diff --git a/tests/git/utils.ts b/tests/git/utils.ts index dbcafaafb..8652133c3 100644 --- a/tests/git/utils.ts +++ b/tests/git/utils.ts @@ -68,39 +68,6 @@ async function createGitRepo({ } } -const objectsDirName = 
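// Illustrative sketch (not part of this changeset): because `assertRef` is a
// TypeScript assertion function (`asserts ref is VaultRef`), callers such as
// `log` and `version` in VaultInternal get the narrowed type for free after
// the call. `normalizeRef` here is a hypothetical caller:
function normalizeRef(ref: string | VaultRef): string {
  vaultsUtils.assertRef(ref); // Throws ErrorVaultReferenceInvalid otherwise
  // From here on `ref` is typed as VaultRef
  return ref === vaultsUtils.tagLast ? vaultsUtils.canonicalBranch : ref;
}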
'objects'; -const excludedDirs = ['pack', 'info']; - -/** - * Walks the filesystem to list out all git objects in the objects directory - * @param fs - * @param gitDir - */ -async function listGitObjects({ - efs, - gitDir, -}: { - efs: EncryptedFS; - gitDir: string; -}) { - const objectsDirPath = path.join(gitDir, objectsDirName); - const objectSet: Set = new Set(); - const objectDirs = await efs.promises.readdir(objectsDirPath); - for (const objectDir of objectDirs) { - if (typeof objectDir !== 'string') { - utils.never('objectDir should be a string'); - } - if (excludedDirs.includes(objectDir)) continue; - const objectIds = await efs.promises.readdir( - path.join(objectsDirPath, objectDir), - ); - for (const objectId of objectIds) { - objectSet.add(objectDir + objectId); - } - } - return [...objectSet]; -} - type NegotiationTestData = | { type: 'want'; @@ -256,7 +223,6 @@ const gitRequestDataArb = fc.oneof( export { createGitRepo, - listGitObjects, generateGitNegotiationLine, request, gitObjectIdArb, diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts index a789b8cff..a4d7ca88b 100644 --- a/tests/vaults/VaultInternal.test.ts +++ b/tests/vaults/VaultInternal.test.ts @@ -35,7 +35,7 @@ describe('VaultInternal', () => { const vaultIdGenerator = vaultsUtils.createVaultIdGenerator(); - const fakeKeyRing = { + const dummyKeyRing = { getNodeId: () => { return nodeTestUtils.generateRandomNodeId(); }, @@ -44,7 +44,7 @@ describe('VaultInternal', () => { const secret2 = { name: 'secret-2', content: 'secret-content-2' }; const secret3 = { name: 'secret-3', content: 'secret-content-3' }; - const runGen = async (gen) => { + const consumeGenerator = async (gen: AsyncGenerator) => { for await (const _ of gen) { // Do nothing } @@ -91,7 +91,7 @@ describe('VaultInternal', () => { vaultId = vaultIdGenerator(); vault = await VaultInternal.createVaultInternal({ vaultId, - keyRing: fakeKeyRing, + keyRing: dummyKeyRing, efs, logger, fresh: true, @@ -100,7 +100,6 @@ describe('VaultInternal', () => { vaultName: 'testVault', }); }); - afterEach(async () => { await vault.stop(); await vault.destroy(); @@ -151,7 +150,7 @@ describe('VaultInternal', () => { await vault.stop(); vault = await VaultInternal.createVaultInternal({ vaultId, - keyRing: fakeKeyRing, + keyRing: dummyKeyRing, efs, logger, fresh: false, @@ -174,26 +173,25 @@ describe('VaultInternal', () => { }); expect(files).toEqual([]); await vault.writeF(async (efs) => { - await efs.writeFile('test', 'testdata'); + await efs.writeFile('test', 'testData'); }); commit = (await vault.log(undefined, 1))[0]; await vault.version(commit.commitId); const file = await vault.readF(async (efs) => { return await efs.readFile('test', { encoding: 'utf8' }); }); - expect(file).toBe('testdata'); + expect(file).toBe('testData'); }); - // FIXME: addressed in agent migration stage 2 - test.skip('can change commits and preserve the log with no intermediate vault mutation', async () => { + test('can change commits and preserve the log with no intermediate vault mutation', async () => { const initCommit = (await vault.log(undefined, 1))[0].commitId; await vault.writeF(async (efs) => { - await efs.writeFile('test1', 'testdata1'); + await efs.writeFile('test1', 'testData1'); }); await vault.writeF(async (efs) => { - await efs.writeFile('test2', 'testdata2'); + await efs.writeFile('test2', 'testData2'); }); await vault.writeF(async (efs) => { - await efs.writeFile('test3', 'testdata3'); + await efs.writeFile('test3', 'testData3'); }); const endCommit 
= (await vault.log(undefined, 1))[0].commitId; await vault.version(initCommit); @@ -208,41 +206,40 @@ describe('VaultInternal', () => { expect(files).toEqual(['test1', 'test2', 'test3']); }); test('does not allow changing to an unrecognised commit', async () => { - await expect(() => vault.version('unrecognisedcommit')).rejects.toThrow( + await expect(() => vault.version('unrecognisedCommit')).rejects.toThrow( vaultsErrors.ErrorVaultReferenceInvalid, ); await vault.writeF(async (efs) => { - await efs.writeFile('test1', 'testdata1'); + await efs.writeFile('test1', 'testData1'); }); const secondCommit = (await vault.log(undefined, 1))[0].commitId; await vault.writeF(async (efs) => { - await efs.writeFile('test2', 'testdata2'); + await efs.writeFile('test2', 'testData2'); }); await vault.writeF(async (efs) => { - await efs.writeFile('test3', 'testdata3'); + await efs.writeFile('test3', 'testData3'); }); const fourthCommit = (await vault.log(undefined, 1))[0].commitId; await vault.version(secondCommit); await vault.writeF(async (efs) => { const fd = await efs.open('test3', 'w'); - await efs.write(fd, 'testdata6', 3, 6); + await efs.write(fd, 'testData6', 3, 6); await efs.close(fd); }); await expect(vault.version(fourthCommit)).rejects.toThrow( vaultsErrors.ErrorVaultReferenceMissing, ); }); - // FIXME: addressed in agent migration stage 2 - test.skip('can change to the latest commit', async () => { + test('can change to the latest commit', async () => { const initCommit = (await vault.log(undefined, 1))[0].commitId; await vault.writeF(async (efs) => { - await efs.writeFile('test1', 'testdata1'); + await efs.writeFile('test1', 'testData1'); }); await vault.writeF(async (efs) => { - await efs.writeFile('test2', 'testdata2'); + await efs.writeFile('test2', 'testData2'); }); await vault.writeF(async (efs) => { - await efs.writeFile('test3', 'testdata3'); + await efs.writeFile('test3', 'testData3'); }); await vault.version(initCommit); await vault.version(tagLast); @@ -262,18 +259,18 @@ describe('VaultInternal', () => { async () => { const initCommit = (await vault.log(undefined, 1))[0].commitId; await vault.writeF(async (efs) => { - await efs.writeFile('test1', 'testdata1'); + await efs.writeFile('test1', 'testData1'); }); const secondCommit = (await vault.log(undefined, 1))[0].commitId; await vault.writeF(async (efs) => { - await efs.writeFile('test2', 'testdata2'); + await efs.writeFile('test2', 'testData2'); }); await vault.writeF(async (efs) => { - await efs.writeFile('test3', 'testdata3'); + await efs.writeFile('test3', 'testData3'); }); await vault.version(secondCommit); await vault.writeF(async (efs) => { - await efs.writeFile('test4', 'testdata4'); + await efs.writeFile('test4', 'testData4'); }); let files = await vault.readF(async (efs) => { return await efs.readdir('.'); @@ -423,8 +420,7 @@ describe('VaultInternal', () => { // Has a new commit expect(await vault.log()).toHaveLength(2); }); - // FIXME: addressed in agent migration stage 2 - test.skip('concurrent read operations allowed', async () => { + test('concurrent read operations allowed', async () => { await vault.writeF(async (efs) => { await efs.writeFile(secret1.name, secret1.content); await efs.writeFile(secret2.name, secret2.content); @@ -478,7 +474,7 @@ describe('VaultInternal', () => { // Converting a vault to the interface const vaultInterface = vault as Vault; - // Using the avaliable functions + // Using the available functions await vaultInterface.writeF(async (efs) => { await efs.writeFile('test', 'testContent'); }); @@ 
-520,7 +516,7 @@ describe('VaultInternal', () => { expect(files).toEqual([]); await expect( vault.writeF(async (efs) => { - await efs.writeFile('test', 'testdata'); + await efs.writeFile('test', 'testData'); }), ).rejects.toThrow(vaultsErrors.ErrorVaultRemoteDefined); }); @@ -528,20 +524,20 @@ describe('VaultInternal', () => { 'cannot checkout old commits after branching commit', async () => { await vault.writeF(async (efs) => { - await efs.writeFile('test1', 'testdata1'); + await efs.writeFile('test1', 'testData1'); }); const secondCommit = (await vault.log(undefined, 1))[0].commitId; await vault.writeF(async (efs) => { - await efs.writeFile('test2', 'testdata2'); + await efs.writeFile('test2', 'testData2'); }); const thirdCommit = (await vault.log(undefined, 1))[0].commitId; await vault.writeF(async (efs) => { - await efs.writeFile('test3', 'testdata3'); + await efs.writeFile('test3', 'testData3'); }); const fourthCommit = (await vault.log(undefined, 1))[0].commitId; await vault.version(secondCommit); await vault.writeF(async (efs) => { - await efs.writeFile('test4', 'testdata4'); + await efs.writeFile('test4', 'testData4'); }); await expect(() => { return vault.version(thirdCommit); @@ -599,7 +595,7 @@ describe('VaultInternal', () => { const vaultEFS = vault.efs; const log = await vault.log(); const ref = log[1].commitId; - await efs.writeFile(path.join(vault.vaultDataDir, 'newfile1'), 'hello'); + await efs.writeFile(path.join(vault.vaultDataDir, 'newFile1'), 'hello'); const newRef1 = await git.commit({ fs: vaultEFS, dir: vault.vaultDataDir, @@ -611,7 +607,7 @@ describe('VaultInternal', () => { message: 'test', ref: ref, }); - await efs.writeFile(path.join(vault.vaultDataDir, 'newfile2'), 'world'); + await efs.writeFile(path.join(vault.vaultDataDir, 'newFile2'), 'world'); const newRef2 = await git.commit({ fs: vaultEFS, dir: vault.vaultDataDir, @@ -668,7 +664,7 @@ describe('VaultInternal', () => { yield await efs.rename('notValid', 'randomName'); // Throws }); // Failing commit operation - await expect(() => runGen(gen)).rejects.toThrow(); + await expect(() => consumeGenerator(gen)).rejects.toThrow(); // Make sure secret1 wasn't written when the above commit failed await vault.readF(async (efs) => { @@ -688,7 +684,7 @@ describe('VaultInternal', () => { secret1.content, ); }); - await runGen(gen); + await consumeGenerator(gen); const log = await vault.log(); expect(log).toHaveLength(2); expect(log[0].commitId).toStrictEqual(commit); @@ -735,8 +731,8 @@ describe('VaultInternal', () => { for (const logElement of log) { refs.push(await quickCommit(logElement.commitId, `secret-${num++}`)); } - // @ts-ignore: private method - await vault.garbageCollectGitObjects(); + // @ts-ignore: protected method + await vault.garbageCollectGitObjectsGlobal(); for (const ref of refs) { await expect( @@ -754,8 +750,7 @@ describe('VaultInternal', () => { // Locking tests const waitDelay = 200; test('writeF respects read and write locking', async () => { - // @ts-ignore: kidnap lock - const lock = vault.lock; + const lock = vault.getLock(); // Hold a write lock const [releaseWrite] = await lock.write()(); @@ -780,8 +775,7 @@ describe('VaultInternal', () => { expect(finished).toBe(true); }); test('writeG respects read and write locking', async () => { - // @ts-ignore: kidnap lock - const lock = vault.lock; + const lock = vault.getLock(); // Hold a write lock const [releaseWrite] = await lock.write()(); @@ -791,7 +785,7 @@ describe('VaultInternal', () => { finished = true; yield; }); - const runP = 
runGen(writeGen); + const runP = consumeGenerator(writeGen); await sleep(waitDelay); expect(finished).toBe(false); await releaseWrite(); @@ -805,15 +799,14 @@ describe('VaultInternal', () => { finished = true; yield; }); - const runP2 = runGen(writeGen2); + const runP2 = consumeGenerator(writeGen2); await sleep(waitDelay); await releaseRead(); await runP2; expect(finished).toBe(true); }); test('readF respects write locking', async () => { - // @ts-ignore: kidnap lock - const lock = vault.lock; + const lock = vault.getLock(); // Hold a write lock const [releaseWrite] = await lock.write()(); @@ -828,8 +821,7 @@ describe('VaultInternal', () => { expect(finished).toBe(true); }); test('readG respects write locking', async () => { - // @ts-ignore: kidnap lock - const lock = vault.lock; + const lock = vault.getLock(); // Hold a write lock const [releaseWrite] = await lock.write()(); let finished = false; @@ -838,7 +830,7 @@ describe('VaultInternal', () => { finished = true; yield; }); - const runP = runGen(writeGen); + const runP = consumeGenerator(writeGen); await sleep(waitDelay); expect(finished).toBe(false); await releaseWrite(); @@ -846,8 +838,7 @@ describe('VaultInternal', () => { expect(finished).toBe(true); }); test('readF allows concurrent reads', async () => { - // @ts-ignore: kidnap lock - const lock = vault.lock; + const lock = vault.getLock(); // Hold a write lock const [releaseRead] = await lock.read()(); const finished: boolean[] = []; @@ -864,8 +855,7 @@ describe('VaultInternal', () => { await releaseRead(); }); test('readG allows concurrent reads', async () => { - // @ts-ignore: kidnap lock - const lock = vault.lock; + const lock = vault.getLock(); // Hold a write lock const [releaseRead] = await lock.read()(); const finished: boolean[] = []; @@ -875,10 +865,10 @@ describe('VaultInternal', () => { yield; }; await Promise.all([ - runGen(vault.readG(doThing)), - runGen(vault.readG(doThing)), - runGen(vault.readG(doThing)), - runGen(vault.readG(doThing)), + consumeGenerator(vault.readG(doThing)), + consumeGenerator(vault.readG(doThing)), + consumeGenerator(vault.readG(doThing)), + consumeGenerator(vault.readG(doThing)), ]); expect(finished.length).toBe(4); await releaseRead(); @@ -891,7 +881,7 @@ describe('VaultInternal', () => { vault1 = await VaultInternal.createVaultInternal({ db, efs, - keyRing: fakeKeyRing, + keyRing: dummyKeyRing, vaultId: vaultId1, vaultsDbPath, logger, @@ -913,7 +903,7 @@ describe('VaultInternal', () => { vault1 = await VaultInternal.createVaultInternal({ db, efs, - keyRing: fakeKeyRing, + keyRing: dummyKeyRing, vaultId: vaultId1, vaultsDbPath, logger, @@ -932,7 +922,7 @@ describe('VaultInternal', () => { vault2 = await VaultInternal.createVaultInternal({ db, efs, - keyRing: fakeKeyRing, + keyRing: dummyKeyRing, vaultId: vaultId1, vaultsDbPath, logger, @@ -960,4 +950,16 @@ describe('VaultInternal', () => { await vault.destroy(); await vault.destroy(); }); + test('regression check for statusMatrix not recognising changes', async () => { + // If we make a very quick writes then in some cases the change won't be recognized. + // This manifests as a missing commit in this test. 
+ for (let i = 0; i < 10; i++) { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', `${i}`); + }); + } + + const log = await vault.log(); + expect(log).toHaveLength(11); + }); }); diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index 5eaf98741..51f4ef911 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -80,7 +80,6 @@ describe('VaultManager', () => { logger: logger.getChild(DB.name), }); }); - afterEach(async () => { await db.stop(); await db.destroy(); @@ -90,18 +89,42 @@ describe('VaultManager', () => { }); }); - test('VaultManager readiness', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), + describe('with a vault', () => { + let acl: ACL; + let gestaltGraph: GestaltGraph; + let vaultManager: VaultManager; + + beforeEach(async () => { + acl = await ACL.createACL({ + db, + logger, + }); + gestaltGraph = await GestaltGraph.createGestaltGraph({ + db, + acl, + logger, + }); + vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyRing: dummyKeyRing, + gestaltGraph, + nodeManager: dummyNodeManager, + acl, + notificationsManager: dummyNotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); }); - try { + afterEach(async () => { + await vaultManager.stop(); + await vaultManager.destroy(); + await gestaltGraph.stop(); + await gestaltGraph.destroy(); + await acl.stop(); + await acl.destroy(); + }); + + test('VaultManager readiness', async () => { await expect(vaultManager.destroy()).rejects.toThrow( vaultsErrors.ErrorVaultManagerRunning, ); @@ -115,43 +138,13 @@ describe('VaultManager', () => { await expect(async () => { await vaultManager.listVaults(); }).rejects.toThrow(vaultsErrors.ErrorVaultManagerNotRunning); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('is type correct', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), }); - try { + test('is type correct', async () => { expect(vaultManager).toBeInstanceOf(VaultManager); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test( - 'can create many vaults and open a vault', - async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { + }); + test( + 'can create many vaults and open a vault', + async () => { const vaultNames = [ 'Vault1', 'Vault2', @@ -175,25 +168,10 @@ describe('VaultManager', () => { expect((await vaultManager.listVaults()).size).toEqual( vaultNames.length, ); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }, - globalThis.defaultTimeout * 4, - ); - test('can rename a vault', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: 
dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { + }, + globalThis.defaultTimeout * 4, + ); + test('can rename a vault', async () => { const vaultId = await vaultManager.createVault(vaultName); // We can rename the vault here await vaultManager.renameVault(vaultId, secondVaultName); @@ -210,23 +188,8 @@ describe('VaultManager', () => { await expect( vaultManager.renameVault(vaultId, thirdVaultName), ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultDefined); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('can delete a vault', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), }); - try { + test('can delete a vault', async () => { expect((await vaultManager.listVaults()).size).toBe(0); const secondVaultId = await vaultManager.createVault(secondVaultName); // @ts-ignore: protected method @@ -238,23 +201,8 @@ describe('VaultManager', () => { expect(vault[destroyed]).toBe(true); // Metadata should be gone expect(await vaultManager.getVaultMeta(secondVaultId)).toBeUndefined(); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('can list vaults', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), }); - try { + test('can list vaults', async () => { const firstVaultId = await vaultManager.createVault(vaultName); const secondVaultId = await vaultManager.createVault(secondVaultName); const vaultNames: Array = []; @@ -268,25 +216,10 @@ describe('VaultManager', () => { expect(vaultIds.sort()).toEqual( [firstVaultId.toString(), secondVaultId.toString()].sort(), ); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test( - 'able to read and load existing metadata', - async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { + }); + test( + 'able to read and load existing metadata', + async () => { const vaultNames = [ 'Vault1', 'Vault2', @@ -313,25 +246,10 @@ describe('VaultManager', () => { restartedVaultNames.push(vaultName); }); expect(restartedVaultNames.sort()).toEqual(vaultNames.sort()); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }, - globalThis.defaultTimeout * 2, - ); - test('concurrently creating vault with same name only creates 1 vault', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { + }, + globalThis.defaultTimeout * 2, + ); + test('concurrently creating vault with same name 
only creates 1 vault', async () => { await expect( Promise.all([ vaultManager.createVault(vaultName), @@ -341,23 +259,8 @@ describe('VaultManager', () => { // @ts-ignore: kidnapping the map const vaultMap = vaultManager.vaultMap; expect(vaultMap.size).toBe(1); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('can concurrently rename the same vault', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), }); - try { + test('can concurrently rename the same vault', async () => { const vaultId = await vaultManager.createVault(vaultName); await Promise.all([ vaultManager.renameVault(vaultId, secondVaultName), @@ -366,23 +269,8 @@ describe('VaultManager', () => { const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)) ?.vaultName; expect(vaultNameTest).toBe(thirdVaultName); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('can concurrently open and rename the same vault', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), }); - try { + test('can concurrently open and rename the same vault', async () => { const vaultId = await vaultManager.createVault(vaultName); await Promise.all([ vaultManager.renameVault(vaultId, secondVaultName), @@ -391,23 +279,8 @@ describe('VaultManager', () => { const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)) ?.vaultName; expect(vaultNameTest).toBe(secondVaultName); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('can save the commit state of a vault', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), }); - try { + test('can save the commit state of a vault', async () => { const vaultId = await vaultManager.createVault(vaultName); await vaultManager.withVaults([vaultId], async (vault) => { await vault.writeF(async (efs) => { @@ -427,23 +300,8 @@ describe('VaultManager', () => { }, ); expect(read).toBe('test'); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('do actions on a vault using `withVault`', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), }); - try { + test('do actions on a vault using `withVault`', async () => { const vault1 = await vaultManager.createVault('testVault1' as VaultName); const vault2 = await vaultManager.createVault('testVault2' as VaultName); const vaults = [vault1, vault2]; @@ -470,278 +328,405 @@ describe('VaultManager', () => { expect(a.toString()).toEqual('test1'); expect(b.toString()).toEqual('test2'); }); - } finally { - await 
vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - describe('with remote agents', () => { - let allDataDir: string; - let keyRing: KeyRing; - let nodeGraph: NodeGraph; - let nodeConnectionManager: NodeConnectionManager; - let nodeManager: NodeManager; - let remoteKeynode1: PolykeyAgent, remoteKeynode2: PolykeyAgent; - let localNodeId: NodeId; - let taskManager: TaskManager; - - beforeAll(async () => { - // Creating agents - allDataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - - remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ - password, - options: { - nodePath: path.join(allDataDir, 'remoteKeynode1'), - agentServiceHost: localhost, - clientServiceHost: localhost, - keys: { - passwordOpsLimit: keysUtils.passwordOpsLimits.min, - passwordMemLimit: keysUtils.passwordMemLimits.min, - strictMemoryLock: false, - }, - }, - logger: logger.getChild('Remote Keynode 1'), - }); - remoteKeynode1Id = remoteKeynode1.keyRing.getNodeId(); - remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ - password, - options: { - nodePath: path.join(allDataDir, 'remoteKeynode2'), - agentServiceHost: localhost, - clientServiceHost: localhost, - keys: { - passwordOpsLimit: keysUtils.passwordOpsLimits.min, - passwordMemLimit: keysUtils.passwordMemLimits.min, - strictMemoryLock: false, - }, - }, - logger: logger.getChild('Remote Keynode 2'), - }); - remoteKeynode2Id = remoteKeynode2.keyRing.getNodeId(); - - // Adding details to each agent - await remoteKeynode1.nodeGraph.setNodeContactAddressData( - remoteKeynode2Id, - [remoteKeynode2.agentServiceHost, remoteKeynode2.agentServicePort], - { - mode: 'direct', - connectedTime: Date.now(), - scopes: ['global'], - }, - ); - await remoteKeynode2.nodeGraph.setNodeContactAddressData( - remoteKeynode1Id, - [remoteKeynode1.agentServiceHost, remoteKeynode1.agentServicePort], - { - mode: 'direct', - connectedTime: Date.now(), - scopes: ['global'], - }, - ); - await remoteKeynode1.gestaltGraph.setNode({ - nodeId: remoteKeynode2Id, - }); - await remoteKeynode2.gestaltGraph.setNode({ - nodeId: remoteKeynode1Id, - }); }); - afterAll(async () => { - await remoteKeynode2.stop(); - await remoteKeynode1.stop(); - await fs.promises.rm(allDataDir, { - recursive: true, - force: true, + test('handleScanVaults should list all vaults with permissions', async () => { + // Setting up state + const nodeId1 = nodeTestUtils.generateRandomNodeId(); + const nodeId2 = nodeTestUtils.generateRandomNodeId(); + await gestaltGraph.setNode({ + nodeId: nodeId1, }); - }); - beforeEach(async () => { - remoteVaultId = await remoteKeynode1.vaultManager.createVault(vaultName); + await gestaltGraph.setNode({ + nodeId: nodeId2, + }); + await gestaltGraph.setGestaltAction(['node', nodeId1], 'scan'); - await remoteKeynode1.gestaltGraph.stop(); - await remoteKeynode1.gestaltGraph.start({ fresh: true }); - await remoteKeynode1.acl.stop(); - await remoteKeynode1.acl.start({ fresh: true }); + const vault1 = await vaultManager.createVault('testVault1' as VaultName); + const vault2 = await vaultManager.createVault('testVault2' as VaultName); + const vault3 = await vaultManager.createVault('testVault3' as VaultName); - nodeGraph = await NodeGraph.createNodeGraph({ - db, - keyRing: dummyKeyRing, - logger, - }); + // Setting permissions + await acl.setVaultAction(vault1, nodeId1, 'clone'); + await acl.setVaultAction(vault1, nodeId1, 'pull'); + await acl.setVaultAction(vault2, nodeId1, 'clone'); + // No permissions for vault3 - keyRing = await 
KeyRing.createKeyRing({ - keysPath: path.join(allDataDir, 'allKeyRing'), - password: 'password', - passwordOpsLimit: keysUtils.passwordOpsLimits.min, - passwordMemLimit: keysUtils.passwordMemLimits.min, - strictMemoryLock: false, - logger, - }); - localNodeId = keyRing.getNodeId(); + // Scanning vaults + const gen = vaultManager.handleScanVaults(nodeId1); + const vaults: Record = {}; + for await (const vault of gen) { + vaults[vault.vaultId] = [vault.vaultName, vault.vaultPermissions]; + } + expect(vaults[vault1]).toEqual(['testVault1', ['clone', 'pull']]); + expect(vaults[vault2]).toEqual(['testVault2', ['clone']]); + expect(vaults[vault3]).toBeUndefined(); - taskManager = await TaskManager.createTaskManager({ - db, - lazy: true, - logger, - }); - const tlsConfig = await tlsTestsUtils.createTLSConfig(keyRing.keyPair); - nodeConnectionManager = new NodeConnectionManager({ - keyRing, - tlsConfig, - logger, - }); - await nodeConnectionManager.start({ - host: localhost as Host, - agentService: {} as AgentServerManifest, - }); - nodeManager = new NodeManager({ - db, - keyRing, - nodeConnectionManager, - nodeGraph, - gestaltGraph: dummyGestaltGraph, - sigchain: dummySigchain, - taskManager, - logger, - }); - await nodeManager.start(); - await taskManager.startProcessing(); - await nodeGraph.setNodeContactAddressData( - remoteKeynode1Id, - [remoteKeynode1.agentServiceHost, remoteKeynode1.agentServicePort], - { - mode: 'direct', - connectedTime: Date.now(), - scopes: ['global'], - }, - ); - await nodeGraph.setNodeContactAddressData( - remoteKeynode2Id, - [remoteKeynode2.agentServiceHost, remoteKeynode2.agentServicePort], - { - mode: 'direct', - connectedTime: Date.now(), - scopes: ['global'], - }, - ); - }); - afterEach(async () => { - await taskManager.stopProcessing(); - await taskManager.stopTasks(); - await remoteKeynode1.vaultManager.destroyVault(remoteVaultId); - await nodeManager.stop(); - await nodeConnectionManager.stop(); - await nodeGraph.stop(); - await nodeGraph.destroy(); - await keyRing.stop(); - await keyRing.destroy(); - await taskManager.stop(); + // Should throw due to no permission + await expect(async () => { + for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + // Should throw + } + }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + // Should throw due to lack of scan permission + await gestaltGraph.setGestaltAction(['node', nodeId2], 'notify'); + await expect(async () => { + for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + // Should throw + } + }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); }); + test('stopping respects locks', async () => { + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + // @ts-ignore: kidnap vaultManager lockBox + const vaultLocks = vaultManager.vaultLocks; - test('clone vaults from a remote keynode using a vault name', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), + // Create the vault + const vaultId = await vaultManager.createVault('vaultName'); + const vaultIdString = vaultId.toString() as VaultIdString; + // Getting and holding the lock + const vault = vaultMap.get(vaultIdString)!; + const [releaseWrite] = await vaultLocks.lock([ + vaultId.toString(), + RWLockWriter, + 'write', + ])(); + // Try to close the vault + const closeP = 
vaultManager.closeVault(vaultId); + await sleep(1000); + // Shouldn't be closed + expect(vault[running]).toBe(true); + expect(vaultMap.get(vaultIdString)).toBeDefined(); + // Release the lock + await releaseWrite(); + await closeP; + expect(vault[running]).toBe(false); + expect(vaultMap.get(vaultIdString)).toBeUndefined(); + }); + test('destroying respects locks', async () => { + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + // @ts-ignore: kidnap vaultManager lockBox + const vaultLocks = vaultManager.vaultLocks; + // Create the vault + const vaultId = await vaultManager.createVault('vaultName'); + const vaultIdString = vaultId.toString() as VaultIdString; + // Getting and holding the lock + const vault = vaultMap.get(vaultIdString)!; + const [releaseWrite] = await vaultLocks.lock([ + vaultId.toString(), + RWLockWriter, + 'write', + ])(); + // Try to destroy + const destroyP = vaultManager.destroyVault(vaultId); + await sleep(1000); + // Shouldn't be destroyed + expect(vault[destroyed]).toBe(false); + expect(vaultMap.get(vaultIdString)).toBeDefined(); + // Release the lock + await releaseWrite(); + await destroyP; + expect(vault[destroyed]).toBe(true); + expect(vaultMap.get(vaultIdString)).toBeUndefined(); + }); + test('withVault respects locks', async () => { + // @ts-ignore: kidnap vaultManager lockBox + const vaultLocks = vaultManager.vaultLocks; + // Create the vault + const vaultId = await vaultManager.createVault('vaultName'); + // Getting and holding the lock + const [releaseWrite] = await vaultLocks.lock([ + vaultId.toString(), + RWLockWriter, + 'write', + ])(); + // Try to use vault + let finished = false; + const withP = vaultManager.withVaults([vaultId], async () => { + finished = true; }); - try { - // Creating some state at the remote - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (vault) => { - await vault.writeF(async (efs) => { - await efs.writeFile('secret-1', 'secret1'); - await efs.writeFile('secret-2', 'secret2'); - }); - }, - ); + await sleep(1000); + // Shouldn't have finished yet + expect(finished).toBe(false); + // Release the lock + await releaseWrite(); + await withP; + expect(finished).toBe(true); + }); + test('creation adds a vault', async () => { + await vaultManager.createVault(vaultName); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(1); + }); + test('vaults persist', async () => { + const vaultId = await vaultManager.createVault(vaultName); + await vaultManager.closeVault(vaultId); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(0); - // Setting permissions - await remoteKeynode1.gestaltGraph.setNode({ - nodeId: localNodeId, + // @ts-ignore: protected method + const vault1 = await vaultManager.getVault(vaultId); + expect(vaultMap.size).toBe(1); + + // @ts-ignore: protected method + const vault2 = await vaultManager.getVault(vaultId); + expect(vaultMap.size).toBe(1); + expect(vault1).toEqual(vault2); + }); + test('vaults can be removed from map', async () => { + const vaultId = await vaultManager.createVault(vaultName); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(1); + // @ts-ignore: protected method + const vault1 = await vaultManager.getVault(vaultId); + await vaultManager.closeVault(vaultId); + expect(vaultMap.size).toBe(0); + // @ts-ignore: protected method + const vault2 = await vaultManager.getVault(vaultId); + 
expect(vault1).not.toEqual(vault2); + }); + test('stopping vaultManager empties map and stops all vaults', async () => { + const vaultId1 = await vaultManager.createVault('vault1'); + const vaultId2 = await vaultManager.createVault('vault2'); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(2); + // @ts-ignore: protected method + const vault1 = await vaultManager.getVault(vaultId1); + // @ts-ignore: protected method + const vault2 = await vaultManager.getVault(vaultId2); + await vaultManager.stop(); + expect(vaultMap.size).toBe(0); + expect(vault1[running]).toBe(false); + expect(vault2[running]).toBe(false); + }); + test('destroying vaultManager destroys all data', async () => { + let vaultManager2: VaultManager | undefined; + try { + const vaultId = await vaultManager.createVault('vault1'); + await vaultManager.stop(); + await vaultManager.destroy(); + // Vaults DB should be empty + expect(await db.count([VaultManager.constructor.name])).toBe(0); + vaultManager2 = await VaultManager.createVaultManager({ + vaultsPath, + keyRing: dummyKeyRing, + gestaltGraph: dummyGestaltGraph, + nodeManager: dummyNodeManager, + acl: dummyACL, + notificationsManager: dummyNotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - await remoteKeynode1.gestaltGraph.setGestaltAction( - ['node', localNodeId], - 'scan', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'clone', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'pull', - ); - await vaultManager.cloneVault(remoteKeynode1Id, vaultName); - const vaultId = await vaultManager.getVaultId(vaultName); - if (vaultId === undefined) fail('VaultId is not found.'); - const [file, secretsList] = await vaultManager.withVaults( - [vaultId], - async (vaultClone) => { - return await vaultClone.readF(async (efs) => { - const file = await efs.readFile('secret-1', { encoding: 'utf8' }); - const secretsList = await efs.readdir('.'); - return [file, secretsList]; - }); - }, + + // @ts-ignore: protected method + await expect(vaultManager2.getVault(vaultId)).rejects.toThrow( + vaultsErrors.ErrorVaultsVaultUndefined, ); - expect(file).toBe('secret1'); - expect(secretsList).toContain('secret-1'); - expect(secretsList).toContain('secret-2'); } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); + await vaultManager2?.stop(); + await vaultManager2?.destroy(); } }); - test('clone vaults from a remote keynode using a vault name with no history', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, + test("withVaults should throw if vaultId doesn't exist", async () => { + const vaultId = vaultIdGenerator(); + await expect( + vaultManager.withVaults([vaultId], async () => { + // Do nothing + }), + ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultUndefined); + }); + test('generateVaultId handles vault conflicts', async () => { + const generateVaultIdMock = jest.spyOn( + vaultManager as any, + 'vaultIdGenerator', + ); + // Generate 100 ids + const vaultIds: VaultId[] = []; + for (let i = 0; i < 100; i++) { + vaultIds.push( + // @ts-ignore: protected method + vaultsUtils.encodeVaultId(await vaultManager.generateVaultId()), + ); + } + const duplicates = vaultIds.filter( + (item, index) => vaultIds.indexOf(item) !== index, + ); + expect(duplicates.length).toBe(0); + + const vaultId = await vaultManager.createVault('testVault'); + // Now only returns duplicates + // @ts-ignore - mocking 
protected method + generateVaultIdMock.mockReturnValue(vaultId); + const asd = async () => { + for (let i = 0; i < 100; i++) { + // @ts-ignore: protected method + await vaultManager.generateVaultId(); + } + }; + await expect(async () => { + return await asd(); + }).rejects.toThrow(vaultsErrors.ErrorVaultsCreateVaultId); + }); + }); + describe('with a vault and remote agents', () => { + let allDataDir: string; + let keyRing: KeyRing; + let nodeGraph: NodeGraph; + let nodeConnectionManager: NodeConnectionManager; + let nodeManager: NodeManager; + let remoteKeynode1: PolykeyAgent, remoteKeynode2: PolykeyAgent; + let localNodeId: NodeId; + let taskManager: TaskManager; + + let vaultManager: VaultManager; + + beforeAll(async () => { + // Creating agents + allDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + + remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ + password, + options: { + nodePath: path.join(allDataDir, 'remoteKeynode1'), + agentServiceHost: localhost, + clientServiceHost: localhost, + keys: { + passwordOpsLimit: keysUtils.passwordOpsLimits.min, + passwordMemLimit: keysUtils.passwordMemLimits.min, + strictMemoryLock: false, + }, + }, + logger: logger.getChild('Remote Keynode 1'), + }); + remoteKeynode1Id = remoteKeynode1.keyRing.getNodeId(); + remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ + password, + options: { + nodePath: path.join(allDataDir, 'remoteKeynode2'), + agentServiceHost: localhost, + clientServiceHost: localhost, + keys: { + passwordOpsLimit: keysUtils.passwordOpsLimits.min, + passwordMemLimit: keysUtils.passwordMemLimits.min, + strictMemoryLock: false, + }, + }, + logger: logger.getChild('Remote Keynode 2'), + }); + remoteKeynode2Id = remoteKeynode2.keyRing.getNodeId(); + + // Adding details to each agent + await remoteKeynode1.nodeGraph.setNodeContactAddressData( + remoteKeynode2Id, + [remoteKeynode2.agentServiceHost, remoteKeynode2.agentServicePort], + { + mode: 'direct', + connectedTime: Date.now(), + scopes: ['global'], + }, + ); + await remoteKeynode2.nodeGraph.setNodeContactAddressData( + remoteKeynode1Id, + [remoteKeynode1.agentServiceHost, remoteKeynode1.agentServicePort], + { + mode: 'direct', + connectedTime: Date.now(), + scopes: ['global'], + }, + ); + await remoteKeynode1.gestaltGraph.setNode({ + nodeId: remoteKeynode2Id, + }); + await remoteKeynode2.gestaltGraph.setNode({ + nodeId: remoteKeynode1Id, + }); + }); + afterAll(async () => { + await remoteKeynode2.stop(); + await remoteKeynode1.stop(); + await fs.promises.rm(allDataDir, { + recursive: true, + force: true, + }); + }); + beforeEach(async () => { + remoteVaultId = await remoteKeynode1.vaultManager.createVault(vaultName); + + await remoteKeynode1.gestaltGraph.stop(); + await remoteKeynode1.gestaltGraph.start({ fresh: true }); + await remoteKeynode1.acl.stop(); + await remoteKeynode1.acl.start({ fresh: true }); + + nodeGraph = await NodeGraph.createNodeGraph({ + db, keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, + logger, + }); + + keyRing = await KeyRing.createKeyRing({ + keysPath: path.join(allDataDir, 'allKeyRing'), + password: 'password', + passwordOpsLimit: keysUtils.passwordOpsLimits.min, + passwordMemLimit: keysUtils.passwordMemLimits.min, + strictMemoryLock: false, + logger, + }); + localNodeId = keyRing.getNodeId(); + + taskManager = await TaskManager.createTaskManager({ db, - logger: logger.getChild(VaultManager.name), + lazy: true, + 
logger, }); - try { - // Setting permissions - await remoteKeynode1.gestaltGraph.setNode({ - nodeId: localNodeId, - }); - await remoteKeynode1.gestaltGraph.setGestaltAction( - ['node', localNodeId], - 'scan', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'clone', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'pull', - ); + const tlsConfig = await tlsTestsUtils.createTLSConfig(keyRing.keyPair); + nodeConnectionManager = new NodeConnectionManager({ + keyRing, + tlsConfig, + logger, + }); + await nodeConnectionManager.start({ + host: localhost as Host, + agentService: {} as AgentServerManifest, + }); + nodeManager = new NodeManager({ + db, + keyRing, + nodeConnectionManager, + nodeGraph, + gestaltGraph: dummyGestaltGraph, + sigchain: dummySigchain, + taskManager, + logger, + }); + await nodeManager.start(); + await taskManager.startProcessing(); + await nodeGraph.setNodeContactAddressData( + remoteKeynode1Id, + [remoteKeynode1.agentServiceHost, remoteKeynode1.agentServicePort], + { + mode: 'direct', + connectedTime: Date.now(), + scopes: ['global'], + }, + ); + await nodeGraph.setNodeContactAddressData( + remoteKeynode2Id, + [remoteKeynode2.agentServiceHost, remoteKeynode2.agentServicePort], + { + mode: 'direct', + connectedTime: Date.now(), + scopes: ['global'], + }, + ); - await vaultManager.cloneVault(remoteKeynode1Id, vaultName); - const vaultId = await vaultManager.getVaultId(vaultName); - if (vaultId === undefined) fail('VaultId is not found.'); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('fails to clone non existing vault', async () => { - const vaultManager = await VaultManager.createVaultManager({ + vaultManager = await VaultManager.createVaultManager({ vaultsPath, keyRing: dummyKeyRing, gestaltGraph: dummyGestaltGraph, @@ -751,57 +736,208 @@ describe('VaultManager', () => { db, logger: logger.getChild(VaultManager.name), }); - try { - // Setting permissions - await remoteKeynode1.gestaltGraph.setNode({ - nodeId: localNodeId, - }); - await remoteKeynode1.gestaltGraph.setGestaltAction( - ['node', localNodeId], - 'scan', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'clone', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'pull', - ); + }); + afterEach(async () => { + await taskManager.stopProcessing(); + await taskManager.stopTasks(); + await remoteKeynode1.vaultManager.destroyVault(remoteVaultId); + await nodeManager.stop(); + await nodeConnectionManager.stop(); + await nodeGraph.stop(); + await nodeGraph.destroy(); + await keyRing.stop(); + await keyRing.destroy(); + await taskManager.stop(); + }); - await testUtils.expectRemoteError( - vaultManager.cloneVault( - remoteKeynode1Id, - 'not-existing' as VaultName, - ), - vaultsErrors.ErrorVaultsVaultUndefined, - ); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } + test('clone vaults from a remote keynode using a vault name', async () => { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + nodeId: localNodeId, + }); + await remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', localNodeId], + 'scan', + ); + await 
remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + const [file, secretsList] = await vaultManager.withVaults( + [vaultId], + async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + return [file, secretsList]; + }); + }, + ); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); + }); + test('clone vaults from a remote keynode using a vault name with no history', async () => { + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + nodeId: localNodeId, + }); + await remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', localNodeId], + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + }); + test('fails to clone non existing vault', async () => { + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + nodeId: localNodeId, + }); + await remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', localNodeId], + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await testUtils.expectRemoteError( + vaultManager.cloneVault(remoteKeynode1Id, 'not-existing' as VaultName), + vaultsErrors.ErrorVaultsVaultUndefined, + ); }); test('clone and pull vaults using a vault id', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + nodeId: localNodeId, }); - try { + await remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', localNodeId], + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + const [file, secretsList] = await vaultManager.withVaults( + [vaultId], + async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + return [file, secretsList]; + }); + }, + ); + 
expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); + }); + test('should reject cloning when permissions are not set', async () => { + // Should reject with no permissions set + await testUtils.expectRemoteError( + vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId), + vaultsErrors.ErrorVaultsPermissionDenied, + ); + // No new vault created + expect((await vaultManager.listVaults()).size).toBe(0); + }); + test('should reject Pulling when permissions are not set', async () => { + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + nodeId: localNodeId, + }); + await remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', localNodeId], + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + + const clonedVaultId = await vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, + ); + + await testUtils.expectRemoteError( + vaultManager.pullVault({ vaultId: clonedVaultId }), + vaultsErrors.ErrorVaultsPermissionDenied, + ); + }); + // FIXME: Test has been disabled due to non-deterministic failures in CI/CD + test( + 'can pull a cloned vault', + async () => { // Creating some state at the remote await remoteKeynode1.vaultManager.withVaults( [remoteVaultId], async (vault) => { await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret1'); - await efs.writeFile('secret-2', 'secret2'); }); }, ); @@ -825,408 +961,89 @@ describe('VaultManager', () => { 'pull', ); - await vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId); + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); const vaultId = await vaultManager.getVaultId(vaultName); if (vaultId === undefined) fail('VaultId is not found.'); - const [file, secretsList] = await vaultManager.withVaults( - [vaultId], - async (vaultClone) => { - return await vaultClone.readF(async (efs) => { - const file = await efs.readFile('secret-1', { encoding: 'utf8' }); - const secretsList = await efs.readdir('.'); - return [file, secretsList]; - }); - }, - ); - expect(file).toBe('secret1'); - expect(secretsList).toContain('secret-1'); - expect(secretsList).toContain('secret-2'); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('should reject cloning when permissions are not set', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - // Should reject with no permissions set - await testUtils.expectRemoteError( - vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId), - vaultsErrors.ErrorVaultsPermissionDenied, - ); - // No new vault created - expect((await vaultManager.listVaults()).size).toBe(0); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('should reject Pulling when permissions are not set', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - // Setting permissions - await remoteKeynode1.gestaltGraph.setNode({ - nodeId: localNodeId, + await vaultManager.withVaults([vaultId], async (vaultClone) => { + // 
History should only be 2 commits long + expect(await vaultClone.log()).toHaveLength(2); + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).not.toContain('secret-2'); + }); }); - await remoteKeynode1.gestaltGraph.setGestaltAction( - ['node', localNodeId], - 'scan', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'clone', - ); - const clonedVaultId = await vaultManager.cloneVault( - remoteKeynode1Id, - remoteVaultId, + // Adding history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret2'); + }); + }, ); - await testUtils.expectRemoteError( - vaultManager.pullVault({ vaultId: clonedVaultId }), - vaultsErrors.ErrorVaultsPermissionDenied, - ); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - // Test has been disabled due to non-deterministic failures in CI/CD - // test( - // 'can pull a cloned vault', - // async () => { - // const vaultManager = await VaultManager.createVaultManager({ - // vaultsPath, - // keyRing: dummyKeyRing, - // gestaltGraph: dummyGestaltGraph, - // nodeManager, - // acl: dummyACL, - // notificationsManager: dummyNotificationsManager, - // db, - // logger: logger.getChild(VaultManager.name), - // }); - // try { - // // Creating some state at the remote - // await remoteKeynode1.vaultManager.withVaults( - // [remoteVaultId], - // async (vault) => { - // await vault.writeF(async (efs) => { - // await efs.writeFile('secret-1', 'secret1'); - // }); - // }, - // ); - // - // // Setting permissions - // await remoteKeynode1.gestaltGraph.setNode({ - // nodeId: localNodeId, - // }); - // await remoteKeynode1.gestaltGraph.setGestaltAction( - // ['node', localNodeId], - // 'scan', - // ); - // await remoteKeynode1.acl.setVaultAction( - // remoteVaultId, - // localNodeId, - // 'clone', - // ); - // await remoteKeynode1.acl.setVaultAction( - // remoteVaultId, - // localNodeId, - // 'pull', - // ); - // - // await vaultManager.cloneVault(remoteKeynode1Id, vaultName); - // const vaultId = await vaultManager.getVaultId(vaultName); - // if (vaultId === undefined) fail('VaultId is not found.'); - // await vaultManager.withVaults([vaultId], async (vaultClone) => { - // return await vaultClone.readF(async (efs) => { - // const file = await efs.readFile('secret-1', { encoding: 'utf8' }); - // const secretsList = await efs.readdir('.'); - // expect(file).toBe('secret1'); - // expect(secretsList).toContain('secret-1'); - // expect(secretsList).not.toContain('secret-2'); - // }); - // }); - // - // // Creating new history - // await remoteKeynode1.vaultManager.withVaults( - // [remoteVaultId], - // async (vault) => { - // await vault.writeF(async (efs) => { - // await efs.writeFile('secret-2', 'secret2'); - // }); - // }, - // ); - // - // // Pulling vault - // await vaultManager.pullVault({ - // vaultId: vaultId, - // }); - // - // // Should have new data - // await vaultManager.withVaults([vaultId], async (vaultClone) => { - // return await vaultClone.readF(async (efs) => { - // const file = await efs.readFile('secret-1', { encoding: 'utf8' }); - // const secretsList = await efs.readdir('.'); - // expect(file).toBe('secret1'); - // expect(secretsList).toContain('secret-1'); - // 
expect(secretsList).toContain('secret-2'); - // }); - // }); - // } finally { - // await vaultManager?.stop(); - // await vaultManager?.destroy(); - // } - // }, - // globalThis.defaultTimeout * 2, - // ); - test( - 'manage pulling from different remotes', - async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), + // Pulling vault + await vaultManager.pullVault({ + vaultId: vaultId, }); - try { - // Initial history - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (remoteVault) => { - await remoteVault.writeF(async (efs) => { - await efs.writeFile(secretNames[0], 'success?'); - await efs.writeFile(secretNames[1], 'success?'); - }); - }, - ); - - // Setting permissions - await remoteKeynode1.gestaltGraph.setNode({ - nodeId: localNodeId, - }); - await remoteKeynode1.gestaltGraph.setGestaltAction( - ['node', localNodeId], - 'scan', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'clone', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'pull', - ); - await remoteKeynode1.gestaltGraph.setNode({ - nodeId: remoteKeynode2Id, + // Should have new data + await vaultManager.withVaults([vaultId], async (vaultClone) => { + // History should only be 3 commits long + expect(await vaultClone.log()).toHaveLength(3); + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); }); - await remoteKeynode1.gestaltGraph.setGestaltAction( - ['node', remoteKeynode2Id], - 'scan', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - remoteKeynode2Id, - 'clone', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - remoteKeynode2Id, - 'pull', - ); + }); - const clonedVaultRemote2Id = - await remoteKeynode2.vaultManager.cloneVault( - remoteKeynode1Id, - remoteVaultId, - ); + // Adding history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret3'); + }); + }, + ); - await remoteKeynode2.gestaltGraph.setNode({ - nodeId: localNodeId, - }); - await remoteKeynode2.gestaltGraph.setGestaltAction( - ['node', localNodeId], - 'scan', - ); - await remoteKeynode2.acl.setVaultAction( - clonedVaultRemote2Id, - localNodeId, - 'clone', - ); - await remoteKeynode2.acl.setVaultAction( - clonedVaultRemote2Id, - localNodeId, - 'pull', - ); - const vaultCloneId = await vaultManager.cloneVault( - remoteKeynode2Id, - clonedVaultRemote2Id, - ); + // Pulling vault + await vaultManager.pullVault({ + vaultId: vaultId, + }); - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (remoteVault) => { - await remoteVault.writeF(async (efs) => { - await efs.writeFile(secretNames[2], 'success?'); - }); - }, - ); - await vaultManager.pullVault({ - vaultId: vaultCloneId, - pullNodeId: remoteKeynode1Id, - pullVaultNameOrId: vaultName, - }); - await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { - await vaultClone.readF(async (efs) => { - expect((await efs.readdir('.')).sort()).toStrictEqual( - secretNames.slice(0, 3).sort(), - ); - }); - 
}); - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (remoteVault) => { - await remoteVault.writeF(async (efs) => { - await efs.writeFile(secretNames[3], 'second success?'); - }); - }, - ); - await vaultManager.pullVault({ vaultId: vaultCloneId }); - await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { - await vaultClone.readF(async (efs) => { - expect((await efs.readdir('.')).sort()).toStrictEqual( - secretNames.sort(), - ); - }); + // Should have new data + await vaultManager.withVaults([vaultId], async (vaultClone) => { + // History should only be 4 commits long + expect(await vaultClone.log()).toHaveLength(4); + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); + expect(secretsList).toContain('secret-3'); }); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }, - globalThis.failedConnectionTimeout, - ); - test( - 'able to recover metadata after complex operations', - async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), }); - try { - const vaultNames = ['Vault1', 'Vault2', 'Vault3', 'Vault4', 'Vault5']; - const alteredVaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault6', - 'Vault10', - ]; - for (const vaultName of vaultNames) { - await vaultManager.createVault(vaultName as VaultName); - } - const v5 = await vaultManager.getVaultId('Vault5' as VaultName); - expect(v5).not.toBeUndefined(); - await vaultManager.destroyVault(v5!); - const v4 = await vaultManager.getVaultId('Vault4' as VaultName); - expect(v4).toBeTruthy(); - await vaultManager.renameVault(v4!, 'Vault10' as VaultName); - const v6 = await vaultManager.createVault('Vault6' as VaultName); - - await vaultManager.withVaults([v6], async (vault6) => { - await vault6.writeF(async (efs) => { - await efs.writeFile('reloaded', 'reload'); - }); - }); - - const vn: Array = []; - (await vaultManager.listVaults()).forEach((_, vaultName) => - vn.push(vaultName), - ); - expect(vn.sort()).toEqual(alteredVaultNames.sort()); - await vaultManager.stop(); - await vaultManager.start(); - await vaultManager.createVault('Vault7' as VaultName); - - const v10 = await vaultManager.getVaultId('Vault10' as VaultName); - expect(v10).not.toBeUndefined(); - alteredVaultNames.push('Vault7'); - expect((await vaultManager.listVaults()).size).toEqual( - alteredVaultNames.length, - ); - const vnAltered: Array = []; - (await vaultManager.listVaults()).forEach((_, vaultName) => - vnAltered.push(vaultName), - ); - expect(vnAltered.sort()).toEqual(alteredVaultNames.sort()); - const file = await vaultManager.withVaults( - [v6], - async (reloadedVault) => { - return await reloadedVault.readF(async (efs) => { - return await efs.readFile('reloaded', { encoding: 'utf8' }); - }); - }, - ); - - expect(file).toBe('reload'); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } }, globalThis.defaultTimeout * 2, ); - test('throw when trying to commit to a cloned vault', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: 
dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { + test( + 'can pull a cloned vault with branched history', + async () => { // Creating some state at the remote await remoteKeynode1.vaultManager.withVaults( [remoteVaultId], async (vault) => { await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret1'); - await efs.writeFile('secret-2', 'secret2'); }); }, ); @@ -1254,266 +1071,301 @@ describe('VaultManager', () => { const vaultId = await vaultManager.getVaultId(vaultName); if (vaultId === undefined) fail('VaultId is not found.'); await vaultManager.withVaults([vaultId], async (vaultClone) => { - await expect( - vaultClone.writeF(async (efs) => { - await efs.writeFile('secret-3', 'secret3'); - }), - ).rejects.toThrow(vaultsErrors.ErrorVaultRemoteDefined); + // History should only be 2 commits long + expect(await vaultClone.log()).toHaveLength(2); + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).not.toContain('secret-2'); + }); }); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test("test pulling a vault that isn't remote", async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - // Creating some state at the remote - const vaultId = await vaultManager.createVault('testVault1'); - await expect(vaultManager.pullVault({ vaultId })).rejects.toThrow( - vaultsErrors.ErrorVaultRemoteUndefined, + + // Adding history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret2'); + }); + }, ); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test( - 'pullVault respects locking', - async () => { - // This should respect the VaultManager read lock - // and the VaultInternal write lock - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), + + // Pulling vault + await vaultManager.pullVault({ + vaultId: vaultId, }); - const pullVaultMock = jest.spyOn(VaultInternal.prototype, 'pullVault'); - const gitPullMock = jest.spyOn(git, 'pull'); - try { - // Creating some state at the remote - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (vault) => { - await vault.writeF(async (efs) => { - await efs.writeFile('secret-1', 'secret1'); - await efs.writeFile('secret-2', 'secret2'); - }); - }, - ); - // Setting permissions - await remoteKeynode1.gestaltGraph.setNode({ - nodeId: localNodeId, + // Should have new data + await vaultManager.withVaults([vaultId], async (vaultClone) => { + // History should only be 3 commits long + expect(await vaultClone.log()).toHaveLength(3); + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const 
secretsList = await efs.readdir('.'); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); }); - await remoteKeynode1.gestaltGraph.setGestaltAction( - ['node', localNodeId], - 'scan', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'clone', - ); - await remoteKeynode1.acl.setVaultAction( - remoteVaultId, - localNodeId, - 'pull', - ); - - await vaultManager.cloneVault(remoteKeynode1Id, vaultName); - const vaultId = await vaultManager.getVaultId(vaultName); - if (vaultId === undefined) fail('VaultId is not found.'); - - // Creating new history - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (vault) => { - await vault.writeF(async (efs) => { - await efs.writeFile('secret-2', 'secret2'); - }); - }, - ); + }); - // @ts-ignore: kidnap vaultManager map and grabbing lock - const vaultsMap = vaultManager.vaultMap; - const vault = vaultsMap.get(vaultId.toString() as VaultIdString); - // @ts-ignore: kidnap vaultManager lockBox - const vaultLocks = vaultManager.vaultLocks; - const [releaseWrite] = await vaultLocks.lock([ - vaultId.toString(), - RWLockWriter, - 'write', - ])(); - - // Pulling vault respects VaultManager write lock - const pullP = vaultManager.pullVault({ - vaultId: vaultId, - }); - await sleep(200); - expect(pullVaultMock).not.toHaveBeenCalled(); - await releaseWrite(); - await pullP; - expect(pullVaultMock).toHaveBeenCalled(); - pullVaultMock.mockClear(); - - // Creating new history - await remoteKeynode1.vaultManager.withVaults( - [remoteVaultId], - async (vault) => { - await vault.writeF(async (efs) => { - await efs.writeFile('secret-3', 'secret3'); - }); - }, - ); + // Branching history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + const branchCommit = (await vault.log())[1].commitId; + await vault.version(branchCommit); + await vault.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret3'); + }); + }, + ); - // Respects VaultInternal write lock - // @ts-ignore: kidnap vault lock - const vaultLock = vault!.lock; - const [releaseVaultWrite] = await vaultLock.write()(); - // Pulling vault respects VaultManager write lock - gitPullMock.mockClear(); - const pullP2 = vaultManager.pullVault({ + // Pulling vault should throw + await expect( + vaultManager.pullVault({ vaultId: vaultId, - }); - await sleep(200); - expect(gitPullMock).not.toHaveBeenCalled(); - await releaseVaultWrite(); - await pullP2; - expect(gitPullMock).toHaveBeenCalled(); - } finally { - pullVaultMock.mockRestore(); - gitPullMock.mockRestore(); - await vaultManager?.stop(); - await vaultManager?.destroy(); - } + }), + ).rejects.toThrow(vaultsErrors.ErrorVaultsMergeConflict); }, - globalThis.failedConnectionTimeout, + globalThis.defaultTimeout * 2, ); - // Disabled for now, failing in CI and the core network logic is subject to - // change with the QUIC update - test('scanVaults should get all vaults with permissions from remote node', async () => { - // 1. 
we need to set up state - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing, - nodeManager, - acl: {} as any, - gestaltGraph: {} as any, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - // Setting up state - const targetNodeId = remoteKeynode1.keyRing.getNodeId(); - const nodeId1 = keyRing.getNodeId(); - - // Letting nodeGraph know where the remote agent is - await nodeGraph.setNodeContactAddressData( - targetNodeId, - [remoteKeynode1.agentServiceHost, remoteKeynode1.agentServicePort], - { - mode: 'direct', - connectedTime: Date.now(), - scopes: ['global'], + test( + 'manage pulling from different remotes', + async () => { + // Initial history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await remoteVault.writeF(async (efs) => { + await efs.writeFile(secretNames[0], 'success?'); + await efs.writeFile(secretNames[1], 'success?'); + }); }, ); + // Setting permissions await remoteKeynode1.gestaltGraph.setNode({ - nodeId: nodeId1, + nodeId: localNodeId, }); - - const vault1 = await remoteKeynode1.vaultManager.createVault( - 'testVault1' as VaultName, + await remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', localNodeId], + 'scan', ); - const vault2 = await remoteKeynode1.vaultManager.createVault( - 'testVault2' as VaultName, + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', ); - const vault3 = await remoteKeynode1.vaultManager.createVault( - 'testVault3' as VaultName, + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', ); - // Scanning vaults - - // Should throw due to no permission - const testFun = async () => { - for await (const _ of vaultManager.scanVaults(targetNodeId)) { - // Should throw - } - }; - await testUtils.expectRemoteError( - testFun(), - vaultsErrors.ErrorVaultsPermissionDenied, - ); - // Should throw due to lack of scan permission + await remoteKeynode1.gestaltGraph.setNode({ + nodeId: remoteKeynode2Id, + }); await remoteKeynode1.gestaltGraph.setGestaltAction( - ['node', nodeId1], - 'notify', + ['node', remoteKeynode2Id], + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + remoteKeynode2Id, + 'clone', ); - await testUtils.expectRemoteError( - testFun(), - vaultsErrors.ErrorVaultsPermissionDenied, + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + remoteKeynode2Id, + 'pull', ); - // Setting permissions - await remoteKeynode1.gestaltGraph.setGestaltAction( - ['node', nodeId1], + const clonedVaultRemote2Id = + await remoteKeynode2.vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, + ); + + await remoteKeynode2.gestaltGraph.setNode({ + nodeId: localNodeId, + }); + await remoteKeynode2.gestaltGraph.setGestaltAction( + ['node', localNodeId], 'scan', ); - await remoteKeynode1.acl.setVaultAction(vault1, nodeId1, 'clone'); - await remoteKeynode1.acl.setVaultAction(vault1, nodeId1, 'pull'); - await remoteKeynode1.acl.setVaultAction(vault2, nodeId1, 'clone'); - // No permissions for vault3 - - const gen = vaultManager.scanVaults(targetNodeId); - const vaults: Record = {}; - for await (const vault of gen) { - vaults[vault.vaultIdEncoded] = [ - vault.vaultName, - vault.vaultPermissions, - ]; + await remoteKeynode2.acl.setVaultAction( + clonedVaultRemote2Id, + localNodeId, + 'clone', + ); + await remoteKeynode2.acl.setVaultAction( + clonedVaultRemote2Id, + localNodeId, + 'pull', + ); + const vaultCloneId = await 
vaultManager.cloneVault( + remoteKeynode2Id, + clonedVaultRemote2Id, + ); + + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await remoteVault.writeF(async (efs) => { + await efs.writeFile(secretNames[2], 'success?'); + }); + }, + ); + await vaultManager.pullVault({ + vaultId: vaultCloneId, + pullNodeId: remoteKeynode1Id, + pullVaultNameOrId: vaultName, + }); + await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { + await vaultClone.readF(async (efs) => { + expect((await efs.readdir('.')).sort()).toStrictEqual( + secretNames.slice(0, 3).sort(), + ); + }); + }); + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await remoteVault.writeF(async (efs) => { + await efs.writeFile(secretNames[3], 'second success?'); + }); + }, + ); + await vaultManager.pullVault({ vaultId: vaultCloneId }); + await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { + await vaultClone.readF(async (efs) => { + expect((await efs.readdir('.')).sort()).toStrictEqual( + secretNames.sort(), + ); + }); + }); + }, + globalThis.failedConnectionTimeout, + ); + test( + 'able to recover metadata after complex operations', + async () => { + const vaultNames = ['Vault1', 'Vault2', 'Vault3', 'Vault4', 'Vault5']; + const alteredVaultNames = [ + 'Vault1', + 'Vault2', + 'Vault3', + 'Vault6', + 'Vault10', + ]; + for (const vaultName of vaultNames) { + await vaultManager.createVault(vaultName as VaultName); } + const v5 = await vaultManager.getVaultId('Vault5' as VaultName); + expect(v5).not.toBeUndefined(); + await vaultManager.destroyVault(v5!); + const v4 = await vaultManager.getVaultId('Vault4' as VaultName); + expect(v4).toBeTruthy(); + await vaultManager.renameVault(v4!, 'Vault10' as VaultName); + const v6 = await vaultManager.createVault('Vault6' as VaultName); + + await vaultManager.withVaults([v6], async (vault6) => { + await vault6.writeF(async (efs) => { + await efs.writeFile('reloaded', 'reload'); + }); + }); - expect(vaults[vaultsUtils.encodeVaultId(vault1)]).toEqual([ - 'testVault1', - ['clone', 'pull'], - ]); - expect(vaults[vaultsUtils.encodeVaultId(vault2)]).toEqual([ - 'testVault2', - ['clone'], - ]); - expect(vaults[vaultsUtils.encodeVaultId(vault3)]).toBeUndefined(); - } finally { + const vn: Array = []; + (await vaultManager.listVaults()).forEach((_, vaultName) => + vn.push(vaultName), + ); + expect(vn.sort()).toEqual(alteredVaultNames.sort()); await vaultManager.stop(); - } - }); - test('can handle name conflict when cloning', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), + await vaultManager.start(); + await vaultManager.createVault('Vault7' as VaultName); + + const v10 = await vaultManager.getVaultId('Vault10' as VaultName); + expect(v10).not.toBeUndefined(); + alteredVaultNames.push('Vault7'); + expect((await vaultManager.listVaults()).size).toEqual( + alteredVaultNames.length, + ); + const vnAltered: Array = []; + (await vaultManager.listVaults()).forEach((_, vaultName) => + vnAltered.push(vaultName), + ); + expect(vnAltered.sort()).toEqual(alteredVaultNames.sort()); + const file = await vaultManager.withVaults( + [v6], + async (reloadedVault) => { + return await reloadedVault.readF(async (efs) => { + return await efs.readFile('reloaded', { encoding: 
'utf8' }); + }); + }, + ); + + expect(file).toBe('reload'); + }, + globalThis.defaultTimeout * 2, + ); + test('throw when trying to commit to a cloned vault', async () => { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + nodeId: localNodeId, }); - try { + await remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', localNodeId], + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + await vaultManager.withVaults([vaultId], async (vaultClone) => { + await expect( + vaultClone.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret3'); + }), + ).rejects.toThrow(vaultsErrors.ErrorVaultRemoteDefined); + }); + }); + test("test pulling a vault that isn't remote", async () => { + // Creating some state at the remote + const vaultId = await vaultManager.createVault('testVault1'); + await expect(vaultManager.pullVault({ vaultId })).rejects.toThrow( + vaultsErrors.ErrorVaultRemoteUndefined, + ); + }); + test( + 'pullVault respects locking', + async () => { + // This should respect the VaultManager read lock + // and the VaultInternal write lock + const pullVaultMock = jest.spyOn(VaultInternal.prototype, 'pullVault'); + const gitPullMock = jest.spyOn(git, 'pull'); // Creating some state at the remote await remoteKeynode1.vaultManager.withVaults( [remoteVaultId], @@ -1544,455 +1396,207 @@ describe('VaultManager', () => { 'pull', ); - // Before cloning we create a local vault - await vaultManager.createVault(vaultName); - await vaultManager.createVault(`${vaultName}-1`); - await vaultManager.createVault(`${vaultName}-2`); - await vaultManager.createVault(`${vaultName}-3`); - await vaultManager.createVault(`${vaultName}-4`); - await vaultManager.createVault(`${vaultName}-5`); - - const vaultId = await vaultManager.cloneVault( - remoteKeynode1Id, - vaultName, - ); + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); if (vaultId === undefined) fail('VaultId is not found.'); - const [file, secretsList] = await vaultManager.withVaults( - [vaultId], - async (vaultClone) => { - return await vaultClone.readF(async (efs) => { - const file = await efs.readFile('secret-1', { encoding: 'utf8' }); - const secretsList = await efs.readdir('.'); - return [file, secretsList]; + + // Creating new history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret2'); }); }, ); - expect(file).toBe('secret1'); - expect(secretsList).toContain('secret-1'); - expect(secretsList).toContain('secret-2'); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - }); - test('handleScanVaults should list all vaults with permissions', async () => { - // 1. 
we need to set up state - const acl = await ACL.createACL({ - db, - logger, - }); - const gestaltGraph = await GestaltGraph.createGestaltGraph({ - db, - acl, - logger, - }); - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - nodeManager: dummyNodeManager, - acl, - gestaltGraph, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - // Setting up state - const nodeId1 = nodeTestUtils.generateRandomNodeId(); - const nodeId2 = nodeTestUtils.generateRandomNodeId(); - await gestaltGraph.setNode({ + + // @ts-ignore: kidnap vaultManager map and grabbing lock + const vaultsMap = vaultManager.vaultMap; + const vault = vaultsMap.get(vaultId.toString() as VaultIdString); + // @ts-ignore: kidnap vaultManager lockBox + const vaultLocks = vaultManager.vaultLocks; + const [releaseWrite] = await vaultLocks.lock([ + vaultId.toString(), + RWLockWriter, + 'write', + ])(); + + // Pulling vault respects VaultManager write lock + const pullP = vaultManager.pullVault({ + vaultId: vaultId, + }); + await sleep(200); + expect(pullVaultMock).not.toHaveBeenCalled(); + await releaseWrite(); + await pullP; + expect(pullVaultMock).toHaveBeenCalled(); + pullVaultMock.mockClear(); + + // Creating new history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret3'); + }); + }, + ); + + // Respects VaultInternal write lock + // @ts-ignore: kidnap vault lock + const vaultLock = vault!.lock; + const [releaseVaultWrite] = await vaultLock.write()(); + // Pulling vault respects VaultManager write lock + gitPullMock.mockClear(); + const pullP2 = vaultManager.pullVault({ + vaultId: vaultId, + }); + await sleep(200); + expect(gitPullMock).not.toHaveBeenCalled(); + await releaseVaultWrite(); + await pullP2; + expect(gitPullMock).toHaveBeenCalled(); + }, + globalThis.failedConnectionTimeout, + ); + test('scanVaults should get all vaults with permissions from remote node', async () => { + // Setting up state + const targetNodeId = remoteKeynode1.keyRing.getNodeId(); + const nodeId1 = keyRing.getNodeId(); + + // Letting nodeGraph know where the remote agent is + await nodeGraph.setNodeContactAddressData( + targetNodeId, + [remoteKeynode1.agentServiceHost, remoteKeynode1.agentServicePort], + { + mode: 'direct', + connectedTime: Date.now(), + scopes: ['global'], + }, + ); + + await remoteKeynode1.gestaltGraph.setNode({ nodeId: nodeId1, }); - await gestaltGraph.setNode({ - nodeId: nodeId2, - }); - await gestaltGraph.setGestaltAction(['node', nodeId1], 'scan'); - const vault1 = await vaultManager.createVault('testVault1' as VaultName); - const vault2 = await vaultManager.createVault('testVault2' as VaultName); - const vault3 = await vaultManager.createVault('testVault3' as VaultName); + const vault1 = await remoteKeynode1.vaultManager.createVault( + 'testVault1' as VaultName, + ); + const vault2 = await remoteKeynode1.vaultManager.createVault( + 'testVault2' as VaultName, + ); + const vault3 = await remoteKeynode1.vaultManager.createVault( + 'testVault3' as VaultName, + ); + + // Scanning vaults + + // Should throw due to no permission + const testFun = async () => { + for await (const _ of vaultManager.scanVaults(targetNodeId)) { + // Should throw + } + }; + await testUtils.expectRemoteError( + testFun(), + vaultsErrors.ErrorVaultsPermissionDenied, + ); + // Should throw due to lack of scan permission + await 
remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', nodeId1], + 'notify', + ); + await testUtils.expectRemoteError( + testFun(), + vaultsErrors.ErrorVaultsPermissionDenied, + ); // Setting permissions - await acl.setVaultAction(vault1, nodeId1, 'clone'); - await acl.setVaultAction(vault1, nodeId1, 'pull'); - await acl.setVaultAction(vault2, nodeId1, 'clone'); + await remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', nodeId1], + 'scan', + ); + await remoteKeynode1.acl.setVaultAction(vault1, nodeId1, 'clone'); + await remoteKeynode1.acl.setVaultAction(vault1, nodeId1, 'pull'); + await remoteKeynode1.acl.setVaultAction(vault2, nodeId1, 'clone'); // No permissions for vault3 - // scanning vaults - const gen = vaultManager.handleScanVaults(nodeId1); + const gen = vaultManager.scanVaults(targetNodeId); const vaults: Record = {}; for await (const vault of gen) { - vaults[vault.vaultId] = [vault.vaultName, vault.vaultPermissions]; + vaults[vault.vaultIdEncoded] = [ + vault.vaultName, + vault.vaultPermissions, + ]; } - expect(vaults[vault1]).toEqual(['testVault1', ['clone', 'pull']]); - expect(vaults[vault2]).toEqual(['testVault2', ['clone']]); - expect(vaults[vault3]).toBeUndefined(); - // Should throw due to no permission - await expect(async () => { - for await (const _ of vaultManager.handleScanVaults(nodeId2)) { - // Should throw - } - }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); - // Should throw due to lack of scan permission - await gestaltGraph.setGestaltAction(['node', nodeId2], 'notify'); - await expect(async () => { - for await (const _ of vaultManager.handleScanVaults(nodeId2)) { - // Should throw - } - }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); - } finally { - await vaultManager.stop(); - await vaultManager.destroy(); - await gestaltGraph.stop(); - await gestaltGraph.destroy(); - await acl.stop(); - await acl.destroy(); - } - }); - test('stopping respects locks', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), + expect(vaults[vaultsUtils.encodeVaultId(vault1)]).toEqual([ + 'testVault1', + ['clone', 'pull'], + ]); + expect(vaults[vaultsUtils.encodeVaultId(vault2)]).toEqual([ + 'testVault2', + ['clone'], + ]); + expect(vaults[vaultsUtils.encodeVaultId(vault3)]).toBeUndefined(); }); - try { - // @ts-ignore: kidnapping the map - const vaultMap = vaultManager.vaultMap; - // @ts-ignore: kidnap vaultManager lockBox - const vaultLocks = vaultManager.vaultLocks; + test('can handle name conflict when cloning', async () => { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); - // Create the vault - const vaultId = await vaultManager.createVault('vaultName'); - const vaultIdString = vaultId.toString() as VaultIdString; - // Getting and holding the lock - const vault = vaultMap.get(vaultIdString)!; - const [releaseWrite] = await vaultLocks.lock([ - vaultId.toString(), - RWLockWriter, - 'write', - ])(); - // Try to destroy - const closeP = vaultManager.closeVault(vaultId); - await sleep(1000); - // Shouldn't be closed - expect(vault[running]).toBe(true); - 
expect(vaultMap.get(vaultIdString)).toBeDefined(); - // Release the lock - await releaseWrite(); - await closeP; - expect(vault[running]).toBe(false); - expect(vaultMap.get(vaultIdString)).toBeUndefined(); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('destroying respects locks', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - // @ts-ignore: kidnapping the map - const vaultMap = vaultManager.vaultMap; - // @ts-ignore: kidnap vaultManager lockBox - const vaultLocks = vaultManager.vaultLocks; - // Create the vault - const vaultId = await vaultManager.createVault('vaultName'); - const vaultIdString = vaultId.toString() as VaultIdString; - // Getting and holding the lock - const vault = vaultMap.get(vaultIdString)!; - const [releaseWrite] = await vaultLocks.lock([ - vaultId.toString(), - RWLockWriter, - 'write', - ])(); - // Try to destroy - const destroyP = vaultManager.destroyVault(vaultId); - await sleep(1000); - // Shouldn't be destroyed - expect(vault[destroyed]).toBe(false); - expect(vaultMap.get(vaultIdString)).toBeDefined(); - // Release the lock - await releaseWrite(); - await destroyP; - expect(vault[destroyed]).toBe(true); - expect(vaultMap.get(vaultIdString)).toBeUndefined(); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('withVault respects locks', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - // @ts-ignore: kidnap vaultManager lockBox - const vaultLocks = vaultManager.vaultLocks; - // Create the vault - const vaultId = await vaultManager.createVault('vaultName'); - // Getting and holding the lock - const [releaseWrite] = await vaultLocks.lock([ - vaultId.toString(), - RWLockWriter, - 'write', - ])(); - // Try to use vault - let finished = false; - const withP = vaultManager.withVaults([vaultId], async () => { - finished = true; + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + nodeId: localNodeId, }); - await sleep(1000); - // Shouldn't be destroyed - expect(finished).toBe(false); - // Release the lock - await releaseWrite(); - await withP; - expect(finished).toBe(true); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('creation adds a vault', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - await vaultManager.createVault(vaultName); - // @ts-ignore: kidnapping the map - const vaultMap = vaultManager.vaultMap; - expect(vaultMap.size).toBe(1); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('vaults persist', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: 
dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - const vaultId = await vaultManager.createVault(vaultName); - await vaultManager.closeVault(vaultId); - // @ts-ignore: kidnapping the map - const vaultMap = vaultManager.vaultMap; - expect(vaultMap.size).toBe(0); - - // @ts-ignore: protected method - const vault1 = await vaultManager.getVault(vaultId); - expect(vaultMap.size).toBe(1); + await remoteKeynode1.gestaltGraph.setGestaltAction( + ['node', localNodeId], + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); - // @ts-ignore: protected method - const vault2 = await vaultManager.getVault(vaultId); - expect(vaultMap.size).toBe(1); - expect(vault1).toEqual(vault2); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('vaults can be removed from map', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - const vaultId = await vaultManager.createVault(vaultName); - // @ts-ignore: kidnapping the map - const vaultMap = vaultManager.vaultMap; - expect(vaultMap.size).toBe(1); - // @ts-ignore: protected method - const vault1 = await vaultManager.getVault(vaultId); - await vaultManager.closeVault(vaultId); - expect(vaultMap.size).toBe(0); - // @ts-ignore: protected method - const vault2 = await vaultManager.getVault(vaultId); - expect(vault1).not.toEqual(vault2); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('stopping vaultManager empties map and stops all vaults', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - const vaultId1 = await vaultManager.createVault('vault1'); - const vaultId2 = await vaultManager.createVault('vault2'); - // @ts-ignore: kidnapping the map - const vaultMap = vaultManager.vaultMap; - expect(vaultMap.size).toBe(2); - // @ts-ignore: protected method - const vault1 = await vaultManager.getVault(vaultId1); - // @ts-ignore: protected method - const vault2 = await vaultManager.getVault(vaultId2); - await vaultManager.stop(); - expect(vaultMap.size).toBe(0); - expect(vault1[running]).toBe(false); - expect(vault2[running]).toBe(false); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('destroying vaultManager destroys all data', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - let vaultManager2: VaultManager | undefined; - try { - const vaultId = await vaultManager.createVault('vault1'); - await vaultManager.stop(); - await vaultManager.destroy(); - // Vaults DB should be empty - expect(await 
db.count([VaultManager.constructor.name])).toBe(0); - vaultManager2 = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); + // Before cloning we create a local vault + await vaultManager.createVault(vaultName); + await vaultManager.createVault(`${vaultName}-1`); + await vaultManager.createVault(`${vaultName}-2`); + await vaultManager.createVault(`${vaultName}-3`); + await vaultManager.createVault(`${vaultName}-4`); + await vaultManager.createVault(`${vaultName}-5`); - // @ts-ignore: protected method - await expect(vaultManager2.getVault(vaultId)).rejects.toThrow( - vaultsErrors.ErrorVaultsVaultUndefined, + const vaultId = await vaultManager.cloneVault( + remoteKeynode1Id, + vaultName, ); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - await vaultManager2?.stop(); - await vaultManager2?.destroy(); - } - }); - test("withVaults should throw if vaultId doesn't exist", async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - try { - const vaultId = vaultIdGenerator(); - await expect( - vaultManager.withVaults([vaultId], async () => { - // Do nothing - }), - ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultUndefined); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } - }); - test('generateVaultId handles vault conflicts', async () => { - const vaultManager = await VaultManager.createVaultManager({ - vaultsPath, - keyRing: dummyKeyRing, - gestaltGraph: dummyGestaltGraph, - nodeManager: dummyNodeManager, - acl: dummyACL, - notificationsManager: dummyNotificationsManager, - db, - logger: logger.getChild(VaultManager.name), - }); - const generateVaultIdMock = jest.spyOn( - vaultManager as any, - 'vaultIdGenerator', - ); - try { - // Generate 100 ids - const vaultIds: VaultId[] = []; - for (let i = 0; i < 100; i++) { - vaultIds.push( - // @ts-ignore: protected method - vaultsUtils.encodeVaultId(await vaultManager.generateVaultId()), - ); - } - const duplicates = vaultIds.filter( - (item, index) => vaultIds.indexOf(item) !== index, + if (vaultId === undefined) fail('VaultId is not found.'); + const [file, secretsList] = await vaultManager.withVaults( + [vaultId], + async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + return [file, secretsList]; + }); + }, ); - expect(duplicates.length).toBe(0); - - const vaultId = await vaultManager.createVault('testVault'); - // Now only returns duplicates - // @ts-ignore - mocking protected method - generateVaultIdMock.mockReturnValue(vaultId); - const asd = async () => { - for (let i = 0; i < 100; i++) { - // @ts-ignore: protected method - await vaultManager.generateVaultId(); - } - }; - await expect(async () => { - return await asd(); - }).rejects.toThrow(vaultsErrors.ErrorVaultsCreateVaultId); - } finally { - await vaultManager?.stop(); - await vaultManager?.destroy(); - } + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); + }); }); 
}); diff --git a/tests/vaults/VaultOps.test.ts b/tests/vaults/VaultOps.test.ts index 86dd18c46..ea34f63eb 100644 --- a/tests/vaults/VaultOps.test.ts +++ b/tests/vaults/VaultOps.test.ts @@ -5,12 +5,12 @@ import type { LevelPath } from '@matrixai/db'; import fs from 'fs'; import path from 'path'; import os from 'os'; -import { EncryptedFS } from 'encryptedfs'; +import { EncryptedFS, Stat } from 'encryptedfs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; -import * as errors from '@/vaults/errors'; import VaultInternal from '@/vaults/VaultInternal'; import * as vaultOps from '@/vaults/VaultOps'; +import * as vaultsErrors from '@/vaults/errors'; import * as vaultsUtils from '@/vaults/utils'; import * as keysUtils from '@/keys/utils'; import * as testNodesUtils from '../nodes/utils'; @@ -18,6 +18,12 @@ import * as testNodesUtils from '../nodes/utils'; describe('VaultOps', () => { const logger = new Logger('VaultOps', LogLevel.WARN, [new StreamHandler()]); + const secretName = 'secret'; + const secretNameNew = 'secret-new'; + const secretContent = 'secret-content'; + const secretContentNew = 'secret-content-new'; + const dirName = 'dir'; + let dataDir: string; let baseEfs: EncryptedFS; let vaultId: VaultId; @@ -69,7 +75,6 @@ describe('VaultOps', () => { }); vault = vaultInternal as Vault; }); - afterEach(async () => { await vaultInternal.stop(); await vaultInternal.destroy(); @@ -83,228 +88,391 @@ describe('VaultOps', () => { }); }); - test('adding a secret', async () => { - await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); - const dir = await vault.readF(async (efs) => { - return await efs.readdir('.'); + async function writeSecret(secretPath: string, contents: string) { + return await vault.writeF(async (efs) => { + await vaultsUtils.mkdirExists(efs, path.dirname(secretPath)); + await efs.writeFile(secretPath, contents); }); - expect(dir).toContain('secret-1'); - }); - test('adding a secret and getting it', async () => { - await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); - const secret = await vaultOps.getSecret(vault, 'secret-1'); - expect(secret.toString()).toBe('secret-content'); - await expect(() => - vaultOps.getSecret(vault, 'doesnotexist'), - ).rejects.toThrow(errors.ErrorSecretsSecretUndefined); - }); - test('able to make directories', async () => { - await vaultOps.mkdir(vault, 'dir-1', { recursive: true }); - await vaultOps.mkdir(vault, 'dir-2', { recursive: true }); - await vaultOps.mkdir(vault, path.join('dir-3', 'dir-4'), { - recursive: true, + } + + async function readSecret(path: string) { + return await vault.readF(async (efs) => { + return await efs.readFile(path); }); - await vaultOps.addSecret( - vault, - path.join('dir-3', 'dir-4', 'secret-1'), - 'secret-content', + } + + async function expectSecret(path: string, contentsExpected: string) { + const contentsSecretP = readSecret(path); + await expect(contentsSecretP).resolves.toBeDefined(); + const contentsSecretValue = (await contentsSecretP).toString(); + expect(contentsSecretValue).toBe(contentsExpected); + } + + async function expectSecretNot(path: string) { + const contentsSecretP = readSecret(path); + await expect(contentsSecretP).rejects.toThrow( + 'ENOENT: no such file or directory', ); - await vault.readF(async (efs) => { - const dir = await efs.readdir('.'); - expect(dir).toContain('dir-1'); - expect(dir).toContain('dir-2'); - expect(dir).toContain('dir-3'); + } - expect(await efs.readdir('dir-3')).toContain('dir-4'); - expect(await 
efs.readdir(path.join('dir-3', 'dir-4'))).toContain( - 'secret-1', - ); + async function mkdir(path: string) { + return await vault.writeF(async (efs) => { + await vaultsUtils.mkdirExists(efs, path); }); - }); - test( - 'adding and committing a secret 10 times', - async () => { - const content = 'secret-content'; - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - await vaultOps.addSecret(vault, name, content); - expect( - (await vaultOps.getSecret(vault, name)).toString(), - ).toStrictEqual(content); + } - await expect(vault.readF((efs) => efs.readdir('.'))).resolves.toContain( - name, - ); - } - }, - globalThis.defaultTimeout * 4, - ); - test( - 'updating secret content', - async () => { - await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); - await vaultOps.updateSecret(vault, 'secret-1', 'secret-content-change'); - expect( - (await vaultOps.getSecret(vault, 'secret-1')).toString(), - ).toStrictEqual('secret-content-change'); - }, - globalThis.defaultTimeout * 4, - ); - test('updating secret content within a directory', async () => { - await vaultOps.mkdir(vault, path.join('dir-1', 'dir-2'), { - recursive: true, + async function expectDirExists(path: string) { + return await vault.readF(async (efs) => { + const dirP = efs.readdir(path); + await expect(dirP).resolves.toBeDefined(); }); - await vaultOps.addSecret( - vault, - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vaultOps.updateSecret( - vault, - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content-change', + } + + async function expectDirExistsNot(path: string) { + return await vault.readF(async (efs) => { + const dirP = efs.readdir(path); + await expect(dirP).rejects.toThrow('ENOENT'); + }); + } + + // Adding secrets + describe('addSecret', () => { + test('adding a secret', async () => { + await vaultOps.addSecret(vault, secretName, secretContent); + await expectSecret(secretName, secretContent); + }); + test('add a secret under an existing directory', async () => { + await mkdir(dirName); + const secretPath = path.join(dirName, secretName); + await vaultOps.addSecret(vault, secretPath, secretContent); + await expectSecret(secretPath, secretContent); + }); + test('add a secret creating directory', async () => { + const secretPath = path.join(dirName, secretName); + await vaultOps.addSecret(vault, secretPath, secretContent); + await expectSecret(secretPath, secretContent); + }); + test( + 'adding a secret multiple times', + async () => { + for (let i = 0; i < 5; i++) { + const name = `${secretName}+${i}`; + await vaultOps.addSecret(vault, name, secretContent); + await expectSecret(name, secretContent); + } + }, + globalThis.defaultTimeout * 4, ); - expect( - ( - await vaultOps.getSecret(vault, path.join('dir-1', 'dir-2', 'secret-1')) - ).toString(), - ).toStrictEqual('secret-content-change'); + test('adding a secret that already exists should fail', async () => { + await vaultOps.addSecret(vault, secretName, secretContent); + const addSecretP = vaultOps.addSecret(vault, secretName, secretContent); + await expect(addSecretP).rejects.toThrow( + vaultsErrors.ErrorSecretsSecretDefined, + ); + }); }); - test( - 'updating a secret 10 times', - async () => { - await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); - for (let i = 0; i < 10; i++) { - const content = 'secret-content'; - await vaultOps.updateSecret(vault, 'secret-1', content); - expect( - (await vaultOps.getSecret(vault, 'secret-1')).toString(), - ).toStrictEqual(content); - } - }, - 
globalThis.defaultTimeout * 2,
- );
- test('deleting a secret', async () => {
- await vaultOps.addSecret(vault, 'secret-1', 'secret-content');
- await vaultOps.mkdir(vault, 'dir-1');
- await vaultOps.addSecret(
- vault,
- path.join('dir-1', 'secret-2'),
- 'secret-content',
- );
- await vaultOps.deleteSecret(vault, 'secret-1');
- await expect(() => vaultOps.deleteSecret(vault, 'dir-1')).rejects.toThrow();
- await vaultOps.deleteSecret(vault, path.join('dir-1', 'secret-2'));
- await vaultOps.deleteSecret(vault, 'dir-1');
- await expect(vault.readF((efs) => efs.readdir('.'))).resolves.not.toContain(
- 'secret-1',
+ describe('updateSecret', () => {
+ test('updating secret content', async () => {
+ await writeSecret(secretName, secretContent);
+ await vaultOps.updateSecret(vault, secretName, secretContentNew);
+ await expectSecret(secretName, secretContentNew);
+ });
+ test('updating secret content within a directory', async () => {
+ const secretPath = path.join(dirName, secretName);
+ await writeSecret(secretPath, secretContent);
+ await vaultOps.updateSecret(vault, secretPath, secretContentNew);
+ await expectSecret(secretPath, secretContentNew);
+ });
+ test(
+ 'updating a secret multiple times',
+ async () => {
+ await writeSecret(secretName, secretContent);
+ for (let i = 0; i < 5; i++) {
+ const contentNew = `${secretContentNew}${i}`;
+ await vaultOps.updateSecret(vault, secretName, contentNew);
+ await expectSecret(secretName, contentNew);
+ }
+ },
+ globalThis.defaultTimeout * 2,
);
+ test('updating a secret that does not exist should fail', async () => {
+ await expect(
+ vaultOps.updateSecret(vault, secretName, secretContentNew),
+ ).rejects.toThrow(vaultsErrors.ErrorSecretsSecretUndefined);
+ });
});
- test('deleting a secret within a directory', async () => {
- await expect(() =>
- vaultOps.mkdir(vault, path.join('dir-1', 'dir-2')),
- ).rejects.toThrow(errors.ErrorVaultsRecursive);
- await vaultOps.mkdir(vault, path.join('dir-1', 'dir-2'), {
- recursive: true,
+ describe('renameSecret', () => {
+ test('renaming a secret', async () => {
+ await writeSecret(secretName, secretContent);
+ await vaultOps.renameSecret(vault, secretName, secretNameNew);
+ await expectSecretNot(secretName);
+ await expectSecret(secretNameNew, secretContent);
});
- await vaultOps.addSecret(
- vault,
- path.join('dir-1', 'dir-2', 'secret-1'),
- 'secret-content',
- );
- await vaultOps.deleteSecret(vault, path.join('dir-1', 'dir-2'), {
- recursive: true,
+ test('renaming a secret within a directory', async () => {
+ const secretPath = path.join(dirName, secretName);
+ const secretPathNew = path.join(dirName, secretNameNew);
+ await writeSecret(secretPath, secretContent);
+ await vaultOps.renameSecret(vault, secretPath, secretPathNew);
+ await expectSecretNot(secretPath);
+ await expectSecret(secretPathNew, secretContent);
+ });
+ test('renaming a secret that does not exist should fail', async () => {
+ await expect(
+ vaultOps.renameSecret(vault, secretName, secretNameNew),
+ ).rejects.toThrow(vaultsErrors.ErrorSecretsSecretUndefined);
});
- await expect(
- vault.readF((efs) => efs.readdir('dir-1')),
- ).resolves.not.toContain('dir-2');
});
- test(
- 'deleting a secret 10 times',
- async () => {
- for (let i = 0; i < 10; i++) {
- const name = 'secret ' + i.toString();
- const content = 'secret-content';
- await vaultOps.addSecret(vault, name, content);
- expect(
- (await vaultOps.getSecret(vault, name)).toString(),
- ).toStrictEqual(content);
-
await vaultOps.deleteSecret(vault, name, { recursive: true }); - await expect( - vault.readF((efs) => efs.readdir('.')), - ).resolves.not.toContain(name); - } - }, - globalThis.defaultTimeout * 4, - ); - test('renaming a secret', async () => { - await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); - await vaultOps.renameSecret(vault, 'secret-1', 'secret-change'); - const dir = vault.readF((efs) => efs.readdir('.')); - await expect(dir).resolves.not.toContain('secret-1'); - await expect(dir).resolves.toContain('secret-change'); + describe('getSecret', () => { + test('can get a secret', async () => { + await writeSecret(secretName, secretContent); + const secret = await vaultOps.getSecret(vault, secretName); + expect(secret.toString()).toBe(secretContent); + }); + test('getting a secret that does not exist should fail', async () => { + await expect(vaultOps.getSecret(vault, secretName)).rejects.toThrow( + vaultsErrors.ErrorSecretsSecretUndefined, + ); + }); + test('getting a directory should fail', async () => { + await mkdir(dirName); + await expect(vaultOps.getSecret(vault, dirName)).rejects.toThrow( + vaultsErrors.ErrorSecretsIsDirectory, + ); + }); }); - test('renaming a secret within a directory', async () => { - const dirPath = path.join('dir-1', 'dir-2'); - await vaultOps.mkdir(vault, dirPath, { recursive: true }); - await vaultOps.addSecret( - vault, - path.join(dirPath, 'secret-1'), - 'secret-content', - ); - await vaultOps.renameSecret( - vault, - path.join(dirPath, 'secret-1'), - path.join(dirPath, 'secret-change'), - ); - await expect(vault.readF((efs) => efs.readdir(dirPath))).resolves.toContain( - `secret-change`, - ); + describe('statSecret', () => { + test('can get stat of a secret', async () => { + await writeSecret(secretName, secretContent); + const stat = await vaultOps.statSecret(vault, secretName); + expect(stat).toBeInstanceOf(Stat); + expect(stat.nlink).toBe(1); + }); + test('can get stat of a directory', async () => { + await mkdir(dirName); + const stat = await vaultOps.statSecret(vault, dirName); + expect(stat).toBeInstanceOf(Stat); + }); + test('getting stat of secret that does not exist should fail', async () => { + await expect(vaultOps.statSecret(vault, secretName)).rejects.toThrow( + vaultsErrors.ErrorSecretsSecretUndefined, + ); + }); }); - test('listing secrets', async () => { - await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); - await vaultOps.addSecret(vault, 'secret-2', 'secret-content'); - await vaultOps.mkdir(vault, path.join('dir1', 'dir2'), { recursive: true }); - await vaultOps.addSecret( - vault, - path.join('dir1', 'dir2', 'secret-3'), - 'secret-content', - ); - expect((await vaultOps.listSecrets(vault)).sort()).toStrictEqual( - ['secret-1', 'secret-2', 'dir1/dir2/secret-3'].sort(), - ); + describe('deleteSecret', () => { + test('deleting a secret', async () => { + await writeSecret(secretName, secretContent); + await vaultOps.deleteSecret(vault, secretName); + await expectSecretNot(secretName); + }); + test('deleting a secret in a directory', async () => { + const secretPath = path.join(dirName, secretName); + await writeSecret(secretPath, secretContent); + await vaultOps.deleteSecret(vault, secretPath); + await expectSecretNot(secretPath); + await expectDirExists(dirName); + }); + test('deleting a directory', async () => { + await mkdir(dirName); + await vaultOps.deleteSecret(vault, dirName); + await expectDirExistsNot(dirName); + }); + test('deleting a directory with a file should fail', async () => { + const secretPath = 
path.join(dirName, secretName); + await writeSecret(secretPath, secretContent); + await expect(vaultOps.deleteSecret(vault, dirName)).rejects.toThrow( + vaultsErrors.ErrorVaultsRecursive, + ); + }); + test('deleting a directory with force', async () => { + const secretPath = path.join(dirName, secretName); + await writeSecret(secretPath, secretContent); + await vaultOps.deleteSecret(vault, dirName, { recursive: true }); + await expectDirExistsNot(dirName); + }); + test('deleting a secret that does not exist should fail', async () => { + await expect(vaultOps.deleteSecret(vault, secretName)).rejects.toThrow( + vaultsErrors.ErrorSecretsSecretUndefined, + ); + }); }); - test('listing secret directories', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const content = 'CONTENT, LIKE AND SUBSCRIBE.'; - const secretDirName = path.basename(secretDir); - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); + describe('mkdir', () => { + test('can create directory', async () => { + await vaultOps.mkdir(vault, dirName); + await expectDirExists(dirName); + }); + test('can create recursive directory', async () => { + const dirPath = path.join(dirName, dirName); + await vaultOps.mkdir(vault, dirPath, { recursive: true }); + await expectDirExists(dirPath); + }); + test('creating recursive directory fails without recursive set', async () => { + const dirPath = path.join(dirName, dirName); + await expect(vaultOps.mkdir(vault, dirPath)).rejects.toThrow( + vaultsErrors.ErrorVaultsRecursive, + ); + await expectDirExistsNot(dirPath); + }); + test('creating existing directory should fail', async () => { + await mkdir(dirName); + await expect(vaultOps.mkdir(vault, dirName)).rejects.toThrow( + vaultsErrors.ErrorSecretsSecretDefined, + ); + }); + test('creating existing secret should fail', async () => { + await writeSecret(secretName, secretContent); + await expect(vaultOps.mkdir(vault, secretName)).rejects.toThrow( + vaultsErrors.ErrorSecretsSecretDefined, + ); + }); + }); + describe('addSecretDirectory', () => { + test('adding a directory of 1 secret', async () => { + const secretDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'secret-directory-'), + ); + const secretDirName = path.basename(secretDir); + const name = 'secret'; + const content = keysUtils.getRandomBytes(5); await fs.promises.writeFile(path.join(secretDir, name), content); - } - await vaultOps.addSecretDirectory(vault, secretDir, fs); + await vaultOps.addSecretDirectory(vault, secretDir, fs); + await expect( + vault.readF((efs) => efs.readdir(secretDirName)), + ).resolves.toContain('secret'); - expect((await vaultOps.listSecrets(vault)).sort()).toStrictEqual( - [ - path.join(secretDirName, `secret 0`), - path.join(secretDirName, `secret 1`), - path.join(secretDirName, `secret 2`), - path.join(secretDirName, `secret 3`), - path.join(secretDirName, `secret 4`), - path.join(secretDirName, `secret 5`), - path.join(secretDirName, `secret 6`), - path.join(secretDirName, `secret 7`), - path.join(secretDirName, `secret 8`), - path.join(secretDirName, `secret 9`), - ].sort(), - ); + await fs.promises.rm(secretDir, { + force: true, + recursive: true, + }); + }); + test('adding a directory with subdirectories and files', async () => { + const secretDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'secret-directory-'), + ); + const secretDirName = path.basename(secretDir); + await fs.promises.mkdir(path.join(secretDir, 'dir1')); + await 
fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); + await fs.promises.mkdir(path.join(secretDir, 'dir3')); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, + await fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); + await fs.promises.writeFile( + path.join(secretDir, 'dir1', 'secret2'), + 'secret2', + ); + await fs.promises.writeFile( + path.join(secretDir, 'dir1', 'dir2', 'secret3'), + 'secret3', + ); + await fs.promises.writeFile( + path.join(secretDir, 'dir3', 'secret4'), + 'secret4', + ); + await fs.promises.writeFile( + path.join(secretDir, 'dir3', 'secret5'), + 'secret5', + ); + + await vaultOps.addSecretDirectory(vault, path.join(secretDir), fs); + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual( + [ + path.join(secretDirName, 'secret1'), + path.join(secretDirName, 'dir1', 'secret2'), + path.join(secretDirName, 'dir1', 'dir2', 'secret3'), + path.join(secretDirName, 'dir3', 'secret4'), + path.join(secretDirName, 'dir3', 'secret5'), + ].sort(), + ); + + await fs.promises.rm(secretDir, { + force: true, + recursive: true, + }); + }); + test('testing the errors handling of adding secret directories', async () => { + const secretDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'secret-directory-'), + ); + const secretDirName = path.basename(secretDir); + await fs.promises.mkdir(path.join(secretDir, 'dir1')); + await fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); + await fs.promises.mkdir(path.join(secretDir, 'dir3')); + await fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); + await fs.promises.writeFile( + path.join(secretDir, 'dir1', 'secret2'), + 'secret2', + ); + await fs.promises.writeFile( + path.join(secretDir, 'dir1', 'dir2', 'secret3'), + 'secret3', + ); + await fs.promises.writeFile( + path.join(secretDir, 'dir3', 'secret4'), + 'secret4', + ); + await fs.promises.writeFile( + path.join(secretDir, 'dir3', 'secret5'), + 'secret5', + ); + + await vaultOps.mkdir(vault, secretDirName, { recursive: true }); + await vaultOps.addSecret( + vault, + path.join(secretDirName, 'secret1'), + 'blocking-secret', + ); + await vaultOps.addSecretDirectory(vault, secretDir, fs); + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual( + [ + path.join(secretDirName, 'secret1'), + path.join(secretDirName, 'dir1', 'secret2'), + path.join(secretDirName, 'dir1', 'dir2', 'secret3'), + path.join(secretDirName, 'dir3', 'secret4'), + path.join(secretDirName, 'dir3', 'secret5'), + ].sort(), + ); + + await fs.promises.rm(secretDir, { + force: true, + recursive: true, + }); + }); + }); + describe('listSecrets', () => { + test('can list secrets', async () => { + const secretName1 = `${secretName}1`; + const secretName2 = `${secretName}2`; + await writeSecret(secretName1, secretContent); + await writeSecret(secretName2, secretContent); + + const secretList = await vaultOps.listSecrets(vault); + expect(secretList).toInclude(secretName1); + expect(secretList).toInclude(secretName2); + }); + test('empty directories are not listed', async () => { + const dirName1 = `${dirName}1`; + const dirName2 = `${dirName}2`; + await mkdir(dirName1); + await mkdir(dirName2); + + const secretList = await vaultOps.listSecrets(vault); + expect(secretList).toHaveLength(0); + }); + test('secrets in directories are listed', async () => { + const secretPath1 = path.join(dirName, `${secretName}1`); + const secretPath2 = path.join(dirName, `${secretName}2`); + await writeSecret(secretPath1, 
secretContent); + await writeSecret(secretPath2, secretContent); + + const secretList = await vaultOps.listSecrets(vault); + expect(secretList).toInclude(secretPath1); + expect(secretList).toInclude(secretPath2); + }); + test('empty vault list no secrets', async () => { + const secretList = await vaultOps.listSecrets(vault); + expect(secretList).toHaveLength(0); }); }); test('adding hidden files and directories', async () => { @@ -357,168 +525,4 @@ describe('VaultOps', () => { }, globalThis.defaultTimeout * 4, ); - test('adding a directory of 1 secret', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - const name = 'secret'; - const content = keysUtils.getRandomBytes(5); - await fs.promises.writeFile(path.join(secretDir, name), content); - - await vaultOps.addSecretDirectory(vault, secretDir, fs); - await expect( - vault.readF((efs) => efs.readdir(secretDirName)), - ).resolves.toContain('secret'); - - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('adding a directory with subdirectories and files', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - await fs.promises.mkdir(path.join(secretDir, 'dir1')); - await fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); - await fs.promises.mkdir(path.join(secretDir, 'dir3')); - - await fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'secret2'), - 'secret2', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'dir2', 'secret3'), - 'secret3', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret4'), - 'secret4', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret5'), - 'secret5', - ); - - await vaultOps.addSecretDirectory(vault, path.join(secretDir), fs); - const list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual( - [ - path.join(secretDirName, 'secret1'), - path.join(secretDirName, 'dir1', 'secret2'), - path.join(secretDirName, 'dir1', 'dir2', 'secret3'), - path.join(secretDirName, 'dir3', 'secret4'), - path.join(secretDirName, 'dir3', 'secret5'), - ].sort(), - ); - - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('testing the errors handling of adding secret directories', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - await fs.promises.mkdir(path.join(secretDir, 'dir1')); - await fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); - await fs.promises.mkdir(path.join(secretDir, 'dir3')); - await fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'secret2'), - 'secret2', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'dir2', 'secret3'), - 'secret3', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret4'), - 'secret4', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret5'), - 'secret5', - ); - - await vaultOps.mkdir(vault, secretDirName, { recursive: true }); - await vaultOps.addSecret( - vault, - path.join(secretDirName, 'secret1'), - 'blocking-secret', - ); - await vaultOps.addSecretDirectory(vault, secretDir, fs); - 
const list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual( - [ - path.join(secretDirName, 'secret1'), - path.join(secretDirName, 'dir1', 'secret2'), - path.join(secretDirName, 'dir1', 'dir2', 'secret3'), - path.join(secretDirName, 'dir3', 'secret4'), - path.join(secretDirName, 'dir3', 'secret5'), - ].sort(), - ); - - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test( - 'adding a directory of 100 secrets with some secrets already existing', - async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - for (let i = 0; i < 50; i++) { - const name = 'secret ' + i.toString(); - const content = 'this is secret ' + i.toString(); - await fs.promises.writeFile( - path.join(secretDir, name), - Buffer.from(content), - ); - } - - await vaultOps.mkdir(vault, secretDirName, { recursive: false }); - await vaultOps.addSecret( - vault, - path.join(secretDirName, 'secret 8'), - 'secret-content', - ); - await vaultOps.addSecret( - vault, - path.join(secretDirName, 'secret 9'), - 'secret-content', - ); - await vaultOps.addSecretDirectory(vault, secretDir, fs); - - for (let j = 0; j < 8; j++) { - await expect( - vault.readF((efs) => efs.readdir(secretDirName)), - ).resolves.toContain('secret ' + j.toString()); - } - expect( - ( - await vaultOps.getSecret(vault, path.join(secretDirName, 'secret 8')) - ).toString(), - ).toStrictEqual('this is secret 8'); - expect( - ( - await vaultOps.getSecret(vault, path.join(secretDirName, 'secret 9')) - ).toString(), - ).toStrictEqual('this is secret 9'); - - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }, - globalThis.defaultTimeout * 5, - ); }); diff --git a/tests/vaults/utils.test.ts b/tests/vaults/utils.test.ts index 9ec256385..e0725455d 100644 --- a/tests/vaults/utils.test.ts +++ b/tests/vaults/utils.test.ts @@ -19,7 +19,6 @@ describe('Vaults utils', () => { path.join(os.tmpdir(), 'polykey-test-'), ); }); - afterEach(async () => { await fs.promises.rm(dataDir, { force: true,