diff --git a/src/client/callers/vaultsSecretsGetFileTree.ts b/src/client/callers/vaultsSecretsGetFileTree.ts
index cb1469349..f3b024ba6 100644
--- a/src/client/callers/vaultsSecretsGetFileTree.ts
+++ b/src/client/callers/vaultsSecretsGetFileTree.ts
@@ -1,5 +1,12 @@
-import { RawCaller } from '@matrixai/rpc';
+import type { HandlerTypes } from '@matrixai/rpc';
+import type VaultsSecretsGetFileTree from '../handlers/VaultsSecretsGetFileTree';
+import { ServerCaller } from '@matrixai/rpc';
 
-const vaultsSecretsGetFileTree = new RawCaller();
+type CallerTypes = HandlerTypes<VaultsSecretsGetFileTree>;
+
+const vaultsSecretsGetFileTree = new ServerCaller<
+  CallerTypes['input'],
+  CallerTypes['output']
+>();
 
 export default vaultsSecretsGetFileTree;
diff --git a/src/client/handlers/VaultsSecretsGetFileTree.ts b/src/client/handlers/VaultsSecretsGetFileTree.ts
index cd05a4b7c..7ae493385 100644
--- a/src/client/handlers/VaultsSecretsGetFileTree.ts
+++ b/src/client/handlers/VaultsSecretsGetFileTree.ts
@@ -1,140 +1,56 @@
 import type { DB } from '@matrixai/db';
-import type { JSONObject, JSONRPCRequest } from '@matrixai/rpc';
 import type VaultManager from '../../vaults/VaultManager';
-import { ReadableStream } from 'stream/web';
-import { RawHandler } from '@matrixai/rpc';
+import type { ClientRPCRequestParams, ClientRPCResponseResult } from '../types';
+import type { TreeNode } from '../../vaults/types';
+import type { SecretFilesMessage } from '../types';
+import { ServerHandler } from '@matrixai/rpc';
+import { fileTree } from '../../vaults';
 import * as vaultsUtils from '../../vaults/utils';
 import * as vaultsErrors from '../../vaults/errors';
-import * as utils from '../../utils';
-import { fileTree } from '../../vaults';
-import { validateSync } from '../../validation';
-import * as validationErrors from '../../validation/errors';
 
-class VaultsSecretsGetFileTree extends RawHandler<{
-  vaultManager: VaultManager;
-  db: DB;
-}> {
-  public handle = async (
-    input: [JSONRPCRequest, ReadableStream<Uint8Array>],
+class VaultsSecretsGetFileTree extends ServerHandler<
+  {
+    vaultManager: VaultManager;
+    db: DB;
+  },
+  ClientRPCRequestParams<SecretFilesMessage>,
+  ClientRPCResponseResult<TreeNode>
+> {
+  public async *handle(
+    input: ClientRPCRequestParams<SecretFilesMessage>,
     _cancel: any,
-  ): Promise<[JSONObject, ReadableStream<Uint8Array>]> => {
+  ): AsyncGenerator<ClientRPCResponseResult<TreeNode>, void, void> {
     const { vaultManager, db } = this.container;
-    const [headerMessage, _] = input;
-
-    const params = headerMessage.params;
-    if (params == null || !utils.isObject(params)) utils.never();
-
-    // TEST: testing needed
-    function stringParser(value: any): string {
-      if (typeof value !== 'string') {
-        throw new validationErrors.ErrorParse(
-          'Provided value must be a string',
-        );
-      }
-      return value as string;
-    }
-
-    function globPatternParser(value: any): string {
-      const allowedCharacters = /^[\w\s\-./*?[\]{}]*$/;
-      const invalidCharacters = /[\0:]/;
-      if (!value) {
-        throw new validationErrors.ErrorParse('Glob pattern must not be empty');
-      }
-      if (typeof value !== 'string') {
-        throw new validationErrors.ErrorParse(
-          'Glob pattern must be must be a string',
-        );
-      }
-      if (!allowedCharacters.test(value) || invalidCharacters.test(value)) {
-        throw new validationErrors.ErrorParse('Glob pattern is not valid');
-      }
-      return value as string;
-    }
-
-    function booleanParser(value: any): boolean {
-      if (typeof value !== 'boolean') {
-        throw new validationErrors.ErrorParse(
-          'Provided value must be a string',
-        );
-      }
-      return value as boolean;
-    }
-
-    const {
-      vaultNameOrId,
-      pattern,
-      yieldStats,
-      yieldRoot,
-      yieldFiles,
-      yieldParents,
-      yieldDirectories,
-    }: {
-      vaultNameOrId: string;
-      pattern: string;
-      yieldStats: boolean;
-      yieldRoot: boolean;
-      yieldFiles: boolean;
-      yieldParents: boolean;
-      yieldDirectories: boolean;
-    } = validateSync(
-      (keyPath, value) => {
-        return utils.matchSync(keyPath)(
-          [['vaultNameOrId'], () => stringParser(value)],
-          [['pattern'], () => globPatternParser(value)],
-          [
-            [
-              'yieldStats',
-              'yieldRoot',
-              'yieldFiles',
-              'yieldParents',
-              'yieldDirectories',
-            ],
-            () => booleanParser(value),
-          ],
-          () => value,
-        );
-      },
-      {
-        vaultNameOrId: params.vaultNameOrId,
-        pattern: params.pattern,
-        yieldStats: params.yieldStats,
-        yieldRoot: params.yieldRoot,
-        yieldFiles: params.yieldFiles,
-        yieldParents: params.yieldParents,
-        yieldDirectories: params.yieldDirectories,
-      },
-    );
-
     const vaultId = await db.withTransactionF(async (tran) => {
       const vaultIdFromName = await vaultManager.getVaultId(
-        vaultNameOrId,
+        input.nameOrId,
         tran,
       );
       const vaultId =
-        vaultIdFromName ?? vaultsUtils.decodeVaultId(vaultNameOrId);
+        vaultIdFromName ?? vaultsUtils.decodeVaultId(input.nameOrId);
       if (vaultId == null) throw new vaultsErrors.ErrorVaultsVaultUndefined();
       return vaultId;
     });
 
-    const filesData = vaultManager.withVaultsG([vaultId], (vault) => {
-      return vault.readG((fs): AsyncGenerator<Uint8Array, void, void> => {
-        const fileTreeGen = fileTree.globWalk({
+    yield* vaultManager.withVaultsG([vaultId], (vault) => {
+      return vault.readG(async function* (fs): AsyncGenerator<
+        TreeNode,
+        void,
+        void
+      > {
+        yield* fileTree.globWalk({
           fs: fs,
           basePath: '.',
-          pattern: pattern,
-          yieldStats: yieldStats,
-          yieldRoot: yieldRoot,
-          yieldFiles: yieldFiles,
-          yieldParents: yieldParents,
-          yieldDirectories: yieldDirectories,
+          pattern: input.pattern,
+          yieldStats: input.yieldStats,
+          yieldRoot: input.yieldRoot,
+          yieldFiles: input.yieldFiles,
+          yieldParents: input.yieldParents,
+          yieldDirectories: input.yieldDirectories,
         });
-        return fileTree.serializerStreamFactory(fs, fileTreeGen, false);
       });
     });
-
-    const filesDataStream = utils.asyncGeneratorToReadableStream(filesData);
-    return [{}, filesDataStream];
-  };
+  }
 }
 
 export default VaultsSecretsGetFileTree;
diff --git a/src/client/types.ts b/src/client/types.ts
index dc7bb8f44..2ff158abf 100644
--- a/src/client/types.ts
+++ b/src/client/types.ts
@@ -301,11 +301,6 @@ type VaultsLatestVersionMessage = {
 
 // Secrets
 
-// NOTE: see if we still need this. probably dont, but just be sure.
-type SecretFilesList = {
-  data: string;
-};
-
 type SecretFilesMessage = VaultIdentifierMessage & {
   pattern: string;
   yieldStats: boolean;
@@ -426,7 +421,6 @@ export type {
   VaultsScanMessage,
   VaultsVersionMessage,
   VaultsLatestVersionMessage,
-  SecretFilesList,
   SecretNameMessage,
   SecretIdentifierMessage,
   SecretFilesMessage,
diff --git a/src/utils/utils.ts b/src/utils/utils.ts
index 7154d7a80..6e65acc6a 100644
--- a/src/utils/utils.ts
+++ b/src/utils/utils.ts
@@ -10,12 +10,11 @@ import os from 'os';
 import process from 'process';
 import path from 'path';
 import nodesEvents from 'events';
-import lexi from 'lexicographic-integer';
 import { ReadableStream } from 'stream/web';
+import lexi from 'lexicographic-integer';
 import { PromiseCancellable } from '@matrixai/async-cancellable';
 import { timedCancellable } from '@matrixai/contexts/dist/functions';
 import * as utilsErrors from './errors';
-import { JSONObject } from '@matrixai/rpc';
 
 const AsyncFunction = (async () => {}).constructor;
 const GeneratorFunction = function* () {}.constructor;
diff --git a/src/vaults/fileTree.ts b/src/vaults/fileTree.ts
index 8a2a1d483..697e37484 100644
--- a/src/vaults/fileTree.ts
+++ b/src/vaults/fileTree.ts
@@ -12,7 +12,7 @@ import type {
   HeaderContent,
 } from './types';
 import path from 'path';
-import { ReadableStream, TransformStream } from 'stream/web';
+import { TransformStream } from 'stream/web';
 import { minimatch } from 'minimatch';
 import { JSONParser, TokenizerError } from '@streamparser/json';
 import * as vaultsUtils from './utils';
@@ -532,8 +532,17 @@ function parserTransformStreamFactory(): TransformStream<
     };
     jsonParser.write(initialChunk);
   };
+  let processed: boolean = false;
   return new TransformStream({
+    flush: (controller) => {
+      if (!processed) {
+        controller.error(
+          new validationErrors.ErrorParse('Stream ended prematurely'),
+        );
+      }
+    },
     transform: (chunk, controller) => {
+      if (chunk.byteLength > 0) processed = true;
       switch (phase) {
         case 'START': {
           workingBuffer = vaultsUtils.uint8ArrayConcat([workingBuffer, chunk]);
diff --git a/tests/client/handlers/vaults.test.ts b/tests/client/handlers/vaults.test.ts
index cd4b84d03..9ba69942a 100644
--- a/tests/client/handlers/vaults.test.ts
+++ b/tests/client/handlers/vaults.test.ts
@@ -70,8 +70,6 @@ import * as vaultsUtils from '@/vaults/utils';
 import * as vaultsErrors from '@/vaults/errors';
 import * as networkUtils from '@/network/utils';
 import * as testsUtils from '../../utils';
-import { fileTree } from '@/vaults';
-import { ContentNode, TreeNode } from '@/vaults/types';
 
 describe('vaultsClone', () => {
   const logger = new Logger('vaultsClone test', LogLevel.WARN, [
@@ -1592,7 +1590,7 @@ describe('vaultsSecretsNewDir and vaultsSecretsList', () => {
 
     // List secrets with names of directories
     const secrets = await rpcClient.methods.vaultsSecretsGetFileTree({
-      vaultNameOrId: vaultsIdEncoded,
+      nameOrId: vaultsIdEncoded,
       pattern: '**/*',
       yieldStats: false,
       yieldRoot: false,
@@ -1601,21 +1599,14 @@
       yieldDirectories: true,
     });
 
-    // Parse secrets
-    const secretDataStream = secrets.readable;
-    const parserTransform = fileTree.parserTransformStreamFactory();
-    const parsedFilesStream = secretDataStream.pipeThrough(parserTransform);
-
     // Extract secret file paths
     const parsedFiles: Array<string> = [];
-    for await (const file of parsedFilesStream) {
-      if ('path' in file) {
-        parsedFiles.push(file.path);
-      }
+    for await (const file of secrets) {
+      parsedFiles.push(file.path);
     }
     expect(parsedFiles).toIncludeAllMembers([
       ...secretList.map((secret) => path.join('secretDir', secret)),
-      'secretDir'
+      'secretDir',
     ]);
   });
 });
diff --git a/tests/vaults/VaultOps.test.ts b/tests/vaults/VaultOps.test.ts
index 86c87bfb9..98e9e446b 100644
--- a/tests/vaults/VaultOps.test.ts
+++ b/tests/vaults/VaultOps.test.ts
@@ -16,8 +16,8 @@ import * as vaultOps from '@/vaults/VaultOps';
 import * as vaultsErrors from '@/vaults/errors';
 import * as vaultsUtils from '@/vaults/utils';
 import * as keysUtils from '@/keys/utils';
-import * as testNodesUtils from '../nodes/utils';
 import * as utils from '@/utils';
+import * as testNodesUtils from '../nodes/utils';
 
 describe('VaultOps', () => {
   const logger = new Logger('VaultOps', LogLevel.WARN, [new StreamHandler()]);
diff --git a/tests/vaults/fileTree.test.ts b/tests/vaults/fileTree.test.ts
index 7bae0e97a..32da7340b 100644
--- a/tests/vaults/fileTree.test.ts
+++ b/tests/vaults/fileTree.test.ts
@@ -6,8 +6,8 @@ import { ReadableStream } from 'stream/web';
 import { test } from '@fast-check/jest';
 import fc from 'fast-check';
 import * as fileTree from '@/vaults/fileTree';
-import * as vaultsTestUtils from './utils';
 import * as utils from '@/utils';
+import * as vaultsTestUtils from './utils';
 
 describe('fileTree', () => {
   let dataDir: string;
@@ -502,7 +502,8 @@
         fileTreeGen,
         false,
       );
-      const serializedStream = utils.asyncGeneratorToReadableStream(serializedGen);
+      const serializedStream =
+        utils.asyncGeneratorToReadableStream(serializedGen);
       const outputStream = serializedStream.pipeThrough(parserTransform);
       for await (const output of outputStream) {
         data.push(output);
@@ -549,7 +550,8 @@
         fileTreeGen,
         false,
       );
-      const serializedStream = utils.asyncGeneratorToReadableStream(serializedGen);
+      const serializedStream =
+        utils.asyncGeneratorToReadableStream(serializedGen);
       const outputStream = serializedStream
         .pipeThrough(snipperTransform)
         .pipeThrough(parserTransform);
@@ -667,7 +669,8 @@
         fileTreeGen,
         true,
       );
-      const serializedStream = utils.asyncGeneratorToReadableStream(serializedGen);
+      const serializedStream =
+        utils.asyncGeneratorToReadableStream(serializedGen);
       const outputStream = serializedStream.pipeThrough(parserTransform);
       for await (const output of outputStream) {
         data.push(output);
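
With vaultsSecretsGetFileTree converted from a raw caller to a ServerCaller, clients no longer read secrets.readable and pipe it through fileTree.parserTransformStreamFactory(); the call now yields parsed TreeNode messages directly, as the updated test above shows. A minimal consumption sketch under the same assumptions as that test (an already-established rpcClient; vaultIdEncoded and the yield* flag values are illustrative):

// Sketch only: consuming the new server-streaming vaultsSecretsGetFileTree.
const secrets = await rpcClient.methods.vaultsSecretsGetFileTree({
  nameOrId: vaultIdEncoded, // illustrative: an encoded vault ID or vault name
  pattern: '**/*',
  yieldStats: false,
  yieldRoot: false,
  yieldFiles: true,
  yieldParents: false,
  yieldDirectories: true,
});
const paths: Array<string> = [];
for await (const node of secrets) {
  // Each message is already a TreeNode; no manual JSON stream parsing is needed.
  paths.push(node.path);
}

The flush guard added to parserTransformStreamFactory in fileTree.ts still applies to the remaining raw-stream consumers exercised in fileTree.test.ts: a stream that ends before any non-empty chunk has been processed now errors with ErrorParse('Stream ended prematurely') instead of completing silently.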