diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts
index 97080aa32..377ae376d 100644
--- a/src/vaults/VaultInternal.ts
+++ b/src/vaults/VaultInternal.ts
@@ -164,49 +164,33 @@ class VaultInternal {
       efs,
       logger,
     });
-    // This error flag will contain the error returned by the cloning rpc stream
-    let error;
     // Make the directory where the .git files will be auto generated and
     // where the contents will be cloned to ('contents' file)
     await efs.mkdir(vault.vaultDataDir, { recursive: true });
-    let vaultName: VaultName;
-    let remoteVaultId: VaultId;
-    let remote: RemoteInfo;
-    try {
-      [vaultName, remoteVaultId] = await nodeManager.withConnF(
-        targetNodeId,
-        async (connection) => {
-          const client = connection.getClient();
+    const [vaultName, remoteVaultId]: [VaultName, VaultId] =
+      await nodeManager.withConnF(targetNodeId, async (connection) => {
+        const client = connection.getClient();
 
-          const [request, vaultName, remoteVaultId] = await vault.request(
-            client,
-            targetVaultNameOrId,
-            'clone',
-          );
-          await git.clone({
-            fs: efs,
-            http: { request },
-            dir: vault.vaultDataDir,
-            gitdir: vault.vaultGitDir,
-            url: 'http://',
-            singleBranch: true,
-            ref: vaultsUtils.canonicalBranchRef,
-          });
-          return [vaultName, remoteVaultId];
-        },
-      );
-      remote = {
-        remoteNode: nodesUtils.encodeNodeId(targetNodeId),
-        remoteVault: vaultsUtils.encodeVaultId(remoteVaultId),
-      };
-    } catch (e) {
-      // If the error flag set, and we have the generalised SmartHttpError from
-      // isomorphic git then we need to throw the polykey error
-      if (e instanceof git.Errors.SmartHttpError && error) {
-        throw error;
-      }
-      throw e;
-    }
+        const [request, vaultName, remoteVaultId] = await vault.request(
+          client,
+          targetVaultNameOrId,
+          'clone',
+        );
+        await git.clone({
+          fs: efs,
+          http: { request },
+          dir: vault.vaultDataDir,
+          gitdir: vault.vaultGitDir,
+          url: 'http://',
+          singleBranch: true,
+          ref: vaultsUtils.canonicalBranchRef,
+        });
+        return [vaultName, remoteVaultId];
+      });
+    const remote: RemoteInfo = {
+      remoteNode: nodesUtils.encodeNodeId(targetNodeId),
+      remoteVault: vaultsUtils.encodeVaultId(remoteVaultId),
+    };
     await vault.start({ vaultName, tran });
     // Setting the remote in the metadata
@@ -579,8 +563,6 @@ class VaultInternal {
       );
     }
-    // This error flag will contain the error returned by the cloning rpc stream
-    let error;
     // Keeps track of whether the metadata needs changing to avoid unnecessary db ops
     // 0 = no change, 1 = change with vault ID, 2 = change with vault name
     let metaChange = 0;
@@ -641,17 +623,15 @@ class VaultInternal {
           return remoteVaultId;
         },
       );
-    } catch (err) {
+    } catch (e) {
      // If the error flag set, and we have the generalised SmartHttpError from
      // isomorphic git then we need to throw the polykey error
-      if (err instanceof git.Errors.SmartHttpError && error) {
-        throw error;
-      } else if (err instanceof git.Errors.MergeNotSupportedError) {
-        throw new vaultsErrors.ErrorVaultsMergeConflict(err.message, {
-          cause: err,
+      if (e instanceof git.Errors.MergeNotSupportedError) {
+        throw new vaultsErrors.ErrorVaultsMergeConflict(e.message, {
+          cause: e,
         });
       }
-      throw err;
+      throw e;
     }
     if (metaChange !== 0) {
       if (metaChange === 2) {
diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts
index 22f45305e..4618661b2 100644
--- a/src/vaults/VaultManager.ts
+++ b/src/vaults/VaultManager.ts
@@ -806,7 +806,8 @@ class VaultManager {
     tran?: DBTransaction,
   ): AsyncGenerator {
     if (tran == null) {
-      const handleInfoRequest = (tran) => this.handleInfoRequest(vaultId, tran);
+      const handleInfoRequest = (tran: DBTransaction) =>
+        this.handleInfoRequest(vaultId, tran);
       return yield* this.db.withTransactionG(async function* (tran) {
         return yield* handleInfoRequest(tran);
       });
@@ -907,7 +908,8 @@ class VaultManager {
   }> {
     if (tran == null) {
       // Lambda to maintain `this` context
-      const handleScanVaults = (tran) => this.handleScanVaults(nodeId, tran);
+      const handleScanVaults = (tran: DBTransaction) =>
+        this.handleScanVaults(nodeId, tran);
       return yield* this.db.withTransactionG(async function* (tran) {
         return yield* handleScanVaults(tran);
       });
diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts
index 42f70325d..449cd4adb 100644
--- a/src/vaults/utils.ts
+++ b/src/vaults/utils.ts
@@ -55,9 +55,9 @@ async function* readDirRecursively(
   fs,
   dir = '.',
 ): AsyncGenerator {
-  const dirents = await fs.promises.readdir(dir);
-  for (const dirent of dirents) {
-    const res = path.join(dir, dirent.toString());
+  const dirEntries = await fs.promises.readdir(dir);
+  for (const dirEntry of dirEntries) {
+    const res = path.join(dir, dirEntry.toString());
     const stat = await fs.promises.stat(res);
     if (stat.isDirectory()) {
       yield* readDirRecursively(fs, res);
diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts
index b125fa2eb..a4d7ca88b 100644
--- a/tests/vaults/VaultInternal.test.ts
+++ b/tests/vaults/VaultInternal.test.ts
@@ -173,25 +173,25 @@ describe('VaultInternal', () => {
     });
     expect(files).toEqual([]);
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test', 'testdata');
+      await efs.writeFile('test', 'testData');
     });
     commit = (await vault.log(undefined, 1))[0];
     await vault.version(commit.commitId);
     const file = await vault.readF(async (efs) => {
       return await efs.readFile('test', { encoding: 'utf8' });
     });
-    expect(file).toBe('testdata');
+    expect(file).toBe('testData');
   });
   test('can change commits and preserve the log with no intermediate vault mutation', async () => {
     const initCommit = (await vault.log(undefined, 1))[0].commitId;
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test1', 'testdata1');
+      await efs.writeFile('test1', 'testData1');
     });
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test2', 'testdata2');
+      await efs.writeFile('test2', 'testData2');
     });
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test3', 'testdata3');
+      await efs.writeFile('test3', 'testData3');
     });
     const endCommit = (await vault.log(undefined, 1))[0].commitId;
     await vault.version(initCommit);
@@ -206,24 +206,24 @@ describe('VaultInternal', () => {
     expect(files).toEqual(['test1', 'test2', 'test3']);
   });
   test('does not allow changing to an unrecognised commit', async () => {
-    await expect(() => vault.version('unrecognisedcommit')).rejects.toThrow(
+    await expect(() => vault.version('unrecognisedCommit')).rejects.toThrow(
      vaultsErrors.ErrorVaultReferenceInvalid,
    );
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test1', 'testdata1');
+      await efs.writeFile('test1', 'testData1');
     });
     const secondCommit = (await vault.log(undefined, 1))[0].commitId;
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test2', 'testdata2');
+      await efs.writeFile('test2', 'testData2');
     });
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test3', 'testdata3');
+      await efs.writeFile('test3', 'testData3');
     });
     const fourthCommit = (await vault.log(undefined, 1))[0].commitId;
     await vault.version(secondCommit);
     await vault.writeF(async (efs) => {
       const fd = await efs.open('test3', 'w');
-      await efs.write(fd, 'testdata6', 3, 6);
+      await efs.write(fd, 'testData6', 3, 6);
       await efs.close(fd);
     });
     await expect(vault.version(fourthCommit)).rejects.toThrow(
@@ -233,13 +233,13 @@ describe('VaultInternal', () => {
   test('can change to the latest commit', async () => {
     const initCommit = (await vault.log(undefined, 1))[0].commitId;
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test1', 'testdata1');
+      await efs.writeFile('test1', 'testData1');
     });
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test2', 'testdata2');
+      await efs.writeFile('test2', 'testData2');
     });
     await vault.writeF(async (efs) => {
-      await efs.writeFile('test3', 'testdata3');
+      await efs.writeFile('test3', 'testData3');
     });
     await vault.version(initCommit);
     await vault.version(tagLast);
@@ -259,18 +259,18 @@ async () => {
       const initCommit = (await vault.log(undefined, 1))[0].commitId;
       await vault.writeF(async (efs) => {
-        await efs.writeFile('test1', 'testdata1');
+        await efs.writeFile('test1', 'testData1');
       });
       const secondCommit = (await vault.log(undefined, 1))[0].commitId;
       await vault.writeF(async (efs) => {
-        await efs.writeFile('test2', 'testdata2');
+        await efs.writeFile('test2', 'testData2');
       });
       await vault.writeF(async (efs) => {
-        await efs.writeFile('test3', 'testdata3');
+        await efs.writeFile('test3', 'testData3');
       });
       await vault.version(secondCommit);
       await vault.writeF(async (efs) => {
-        await efs.writeFile('test4', 'testdata4');
+        await efs.writeFile('test4', 'testData4');
       });
       let files = await vault.readF(async (efs) => {
         return await efs.readdir('.');
       });
@@ -474,7 +474,7 @@ describe('VaultInternal', () => {
 
     // Converting a vault to the interface
     const vaultInterface = vault as Vault;
-    // Using the avaliable functions
+    // Using the available functions
     await vaultInterface.writeF(async (efs) => {
       await efs.writeFile('test', 'testContent');
     });
@@ -516,7 +516,7 @@ describe('VaultInternal', () => {
     expect(files).toEqual([]);
     await expect(
       vault.writeF(async (efs) => {
-        await efs.writeFile('test', 'testdata');
+        await efs.writeFile('test', 'testData');
       }),
     ).rejects.toThrow(vaultsErrors.ErrorVaultRemoteDefined);
   });
@@ -524,20 +524,20 @@ describe('VaultInternal', () => {
     'cannot checkout old commits after branching commit',
     async () => {
       await vault.writeF(async (efs) => {
-        await efs.writeFile('test1', 'testdata1');
+        await efs.writeFile('test1', 'testData1');
       });
       const secondCommit = (await vault.log(undefined, 1))[0].commitId;
       await vault.writeF(async (efs) => {
-        await efs.writeFile('test2', 'testdata2');
+        await efs.writeFile('test2', 'testData2');
       });
       const thirdCommit = (await vault.log(undefined, 1))[0].commitId;
       await vault.writeF(async (efs) => {
-        await efs.writeFile('test3', 'testdata3');
+        await efs.writeFile('test3', 'testData3');
       });
       const fourthCommit = (await vault.log(undefined, 1))[0].commitId;
       await vault.version(secondCommit);
       await vault.writeF(async (efs) => {
-        await efs.writeFile('test4', 'testdata4');
+        await efs.writeFile('test4', 'testData4');
       });
       await expect(() => {
         return vault.version(thirdCommit);
@@ -595,7 +595,7 @@ describe('VaultInternal', () => {
     const vaultEFS = vault.efs;
     const log = await vault.log();
     const ref = log[1].commitId;
-    await efs.writeFile(path.join(vault.vaultDataDir, 'newfile1'), 'hello');
+    await efs.writeFile(path.join(vault.vaultDataDir, 'newFile1'), 'hello');
     const newRef1 = await git.commit({
       fs: vaultEFS,
       dir: vault.vaultDataDir,
@@ -607,7 +607,7 @@ describe('VaultInternal', () => {
       message: 'test',
       ref: ref,
     });
-    await efs.writeFile(path.join(vault.vaultDataDir, 'newfile2'), 'world');
+    await efs.writeFile(path.join(vault.vaultDataDir, 'newFile2'), 'world');
     const newRef2 = await git.commit({
       fs: vaultEFS,
       dir: vault.vaultDataDir,
diff --git a/tests/vaults/VaultOps.test.ts b/tests/vaults/VaultOps.test.ts
index fde02fb09..ea34f63eb 100644
--- a/tests/vaults/VaultOps.test.ts
+++ b/tests/vaults/VaultOps.test.ts
@@ -326,7 +326,6 @@ describe('VaultOps', () => {
       );
     });
   });
-  // TODO: fix this up. it's an annoying test
   describe('addSecretDirectory', () => {
     test('adding a directory of 1 secret', async () => {
       const secretDir = await fs.promises.mkdtemp(
@@ -476,7 +475,6 @@ describe('VaultOps', () => {
       expect(secretList).toHaveLength(0);
     });
   });
-  // Not sure if hidden file names are a special case but I'm keeping the tests just in case
   test('adding hidden files and directories', async () => {
     await vaultOps.addSecret(vault, '.hiddenSecret', 'hidden_contents');
     await vaultOps.mkdir(vault, '.hiddenDir', { recursive: true });