diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 75c87dc..8bbab6d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,7 +9,7 @@ on: - '**' env: - BEE_VERSION: '1.1.0' + BEE_VERSION: '1.4.1' BLOCKCHAIN_VERSION: '1.2.0' BEE_ENV_PREFIX: 'swarm-test' BEE_IMAGE_PREFIX: 'docker.pkg.github.com/ethersphere/bee-factory' diff --git a/README.md b/README.md index 8a7f3d8..715ac76 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,26 @@ # Description +The `mantaray` data-structure is widely used within the Swarm ecosystem; to name just one use, `manifests` are built on the mantaray data-structure and consist of the path and file mappings of all dApps hosted on Ethereum Swarm. + With this package you can manipulate and interpret mantaray data via `MantarayNode` and `MantarayFork` abstractions. # Exported Functions and Classes You can import the followings directly from `mantaray-js`: +* MantarayV0_2 # legacy implementation of the mantaray data-structure, supported by the Bee client. +* MantarayV1 # recent implementation of the mantaray data-structure, currently not supported by the Bee client. This codebase is the reference implementation for [the Mantaray 1.0 SWIP](https://github.com/ethersphere/SWIPs/pull/37). +* initManifestNode # initialize a manifest node (either `0.2` or `1.0`) +* Utils # all used utility functions in the library. Mostly operating on `Uint8Array` objects. +* types* # not callable, referring to all types exported and reachable from the index + +The `MantarayV1` and `MantarayV0_2` versions expose a similar interface: + * MantarayNode # class abstracting and manipulating Mantaray Node data * MantarayFork # class abstracting and manipulating Mantaray Fork data -* checkForSeparator # checks for separator character in the node and its descendants prefixes -* initManifestNode # initialize a manifest node * loadAllNodes # loads all mantaray nodes recursively from the storage -* equalNodes # checks whether the two given Mantaray Nodes objects are equal in the in-memory abstraction level -* Utils # all used utility functions in the library. Mostly operating on `Uint8Array` objects. 
-* types* # not callable, referring all types exported and reachable from the index +* checkForSeparator # _(only v0.2)_ checks for separator character in the node's and its descendants' prefixes +* equalNodes # _(only v0.2)_ checks whether the two given Mantaray Node objects are equal at the in-memory abstraction level # Basic usage @@ -22,22 +29,36 @@ You can import the followings directly from `mantaray-js`: ```ts import { initManifestNode, Utils } from 'mantaray-js' -const node = initManifestNode() -const address1 = Utils.gen32Bytes() // instead of `gen32Bytes` some 32 bytes identifier that later could be retrieved from the storage -const address2 = Utils.gen32Bytes() -const address3 = Utils.gen32Bytes() -const address4 = Utils.gen32Bytes() -const address5 = Utils.gen32Bytes() +const node = initManifestNode() // by default it creates a version 1.0 Mantaray node +const address1 = Utils.hexToBytes<32>('4a07606f59562544dd37d26a219a65144e8cf3321b21276d8ea8de4af3ecee63') +const address2 = Utils.hexToBytes<32>('0bf983d3bf7d46afad391856f302805cea6d1bdb2df0341a00ae29db42b1eb45') +const address3 = Utils.hexToBytes<32>('5b5a1de0cdbf277446bdfc2b5f03ef12e5da8dfbd5d74ea608b0ff5544d584bd') +const address4 = Utils.hexToBytes<32>('4f64abff074c90d37c82e3e21e4d18fee52eb887a8b163eab167248e1197459e') +const address5 = Utils.hexToBytes<32>('0d7d218dfce224c1b53d7af8fd9cf88e7f053fe978716a768a88a853bd5f1bc7') const path1 = new TextEncoder().encode('path1/valami/elso') const path2 = new TextEncoder().encode('path1/valami/masodik') const path3 = new TextEncoder().encode('path1/valami/masodik.ext') const path4 = new TextEncoder().encode('path1/valami') const path5 = new TextEncoder().encode('path2') -node.addFork(path1, address1) -node.addFork(path2, address2, { vmi: 'elso' }) // here 'vmi' is a key of metadata and 'elso' is its value -node.addFork(path3, address3) -node.addFork(path4, address4, { vmi: 'negy' }) -node.addFork(path5, address5) +node.addFork(path1, { + entry: address1, // keccak256 hash of any content that can be loaded from Storage. It acts as the reference for the path. +}) +node.addFork(path2, { + entry: address2, + nodeMetadata: { vmi: 'elso' } // JSON metadata about the node that will be serialized at the node level +}) +node.addFork(path3, { + entry: address3, + forkMetadata: { vmi2: 'masodik' } // JSON metadata about the node that will be serialized at the fork level +}) +node.addFork(path4, { + entry: address4, + nodeMetadata: { vmi3: '3' }, + forkMetadata: { vmi3: 'harmadik', vmi: '3!' } +}) +node.addFork(path5, { + nodeMetadata: { metadataAboutPath: 'it is not necessary to save entry for the new node' } +}) node.removePath(path3) // (...) ``` @@ -45,9 +66,9 @@ node.removePath(path3) ## Mantaray Storage Operations ```ts -import { MantarayNode } from 'mantaray-js' +import { MantarayV1 } from 'mantaray-js' -const node = new MantarayNode() +const node = new MantarayV1.MantarayNode() // here `reference` parameter is a `Reference` type which can be a 32 or 64 bytes Uint8Array // and `loadFunction` is a [loadFunction: async (address: Reference): Promise] typed function // that returns the serialised raw data of a MantarayNode of the given reference @@ -68,9 +89,9 @@ The following describes the format of a node binary format. 
┌────────────────────────────────┐ │ obfuscationKey <32 byte> │ ├────────────────────────────────┤ -│ hash("mantaray:0.1") <31 byte> │ +│ hash("mantaray:1.0") <31 byte> │ ├────────────────────────────────┤ -│ refBytesSize <1 byte> │ +│ nodeFeatures <1 byte> │ ├────────────────────────────────┤ │ entry <32/64 byte> │ ├────────────────────────────────┤ @@ -81,36 +102,27 @@ The following describes the format of a node binary format. │ ├────────────────────────────┤ │ │ │ ... │ │ │ ├────────────────────────────┤ │ -│ │ Fork N │ │ +│ │ Fork N │ │ -> where N maximum is 256 │ └────────────────────────────┘ │ +├────────────────────────────────┤ +│ nodeMetadata │ └────────────────────────────────┘ ``` ## Fork ``` -┌───────────────────┬───────────────────────┬──────────────────┐ -│ nodeType <1 byte> │ prefixLength <1 byte> │ prefix <30 byte> │ -├───────────────────┴───────────────────────┴──────────────────┤ -│ reference <32/64 bytes> │ -│ │ +┌───────────────────────────────┬──────────────────────────────┐ +│ prefixLength <1 byte> │ prefix <31 byte> │ +├───────────────────────────────┴──────────────────────────────┤ +│ reference <32/64 byte> │ +├──────────────────────────────────────────────────────────────┤ +│ forkMetadata │ └──────────────────────────────────────────────────────────────┘ ``` -### Fork with metadata - -``` -┌───────────────────┬───────────────────────┬──────────────────┐ -│ nodeType <1 byte> │ prefixLength <1 byte> │ prefix <30 byte> │ -├───────────────────┴───────────────────────┴──────────────────┤ -│ reference <32/64 bytes> │ -│ │ -├─────────────────────────────┬────────────────────────────────┤ -│ metadataBytesSize <2 bytes> │ metadataBytes │ -├─────────────────────────────┘ │ -│ │ -└──────────────────────────────────────────────────────────────┘ -``` +If `forkMetadataSegmentSize` is 0, then `forkMetadata` is omitted. +`forkMetadata` has the same length for each fork under one Mantaray node. 
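To make the fork layout above concrete, here is a minimal sketch (not part of the package API) of how a fork's serialized byte length follows from the parent node's flags, and how `forkMetadata` is padded to whole 32-byte segments. The space-padding behaviour mirrors `serializeMetadataInSegment` in this PR's `src/utils.ts`; the helper names used here are illustrative only.

```ts
// Sketch: byte length of one serialized fork, derived from the parent node's
// `encEntry` flag and `forkMetadataSegmentSize` (both packed into the nodeFeatures byte).
const SEGMENT_SIZE = 32

function forkByteSize(encEntry: boolean, forkMetadataSegmentSize: number): number {
  const prefixLength = 1 // prefixLength <1 byte>
  const prefix = 31 // prefix <31 byte>
  const reference = encEntry ? 64 : 32 // reference <32/64 byte>
  const forkMetadata = forkMetadataSegmentSize * SEGMENT_SIZE // omitted entirely when the segment size is 0

  return prefixLength + prefix + reference + forkMetadata
}

// `forkMetadata` is stringified JSON, space-padded (0x20) to whole 32-byte segments,
// so every fork under one node reserves the same number of metadata bytes.
function padForkMetadata(metadata: Record<string, string>, segmentSize: number): Uint8Array {
  const json = new TextEncoder().encode(JSON.stringify(metadata))

  if (json.length > segmentSize * SEGMENT_SIZE) throw new Error('metadata does not fit into the reserved segments')

  const padded = new Uint8Array(segmentSize * SEGMENT_SIZE).fill(0x20)
  padded.set(json)

  return padded
}

forkByteSize(false, 1) // 96 bytes: 1 + 31 + 32 + 32
```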
# Testing diff --git a/jest.config.ts b/jest.config.ts index 2165f96..636e86c 100644 --- a/jest.config.ts +++ b/jest.config.ts @@ -1,31 +1,10 @@ -/* eslint-disable no-console */ /* * For a detailed explanation regarding each configuration property and type check, visit: * https://jestjs.io/docs/en/configuration.html */ -import { BeeDebug } from '@ethersphere/bee-js' import type { Config } from '@jest/types' export default async (): Promise => { - if (!process.env.BEE_POSTAGE) { - try { - console.log('Creating postage stamps...') - const beeDebugUrl = process.env.BEE_DEBUG_API_URL || 'http://localhost:1635' - const bee = new BeeDebug(beeDebugUrl) - process.env.BEE_POSTAGE = await bee.createPostageBatch('1', 20) - console.log('Queen stamp: ', process.env.BEE_POSTAGE) - // sleep for 11 seconds (10 blocks with ganache block time = 1s) - // needed for postage batches to become usable - // FIXME: sleep should be imported for this, but then we fail with - // Could not find a declaration file for module 'tar-js' - await new Promise(resolve => setTimeout(() => resolve(), 11_000)) - } catch (e) { - // It is possible that for unit tests the Bee nodes does not run - // so we are only logging errors and not leaving them to propagate - console.error(e) - } - } - return { // Indicates whether the coverage information should be collected while executing the test // collectCoverage: false, @@ -47,5 +26,20 @@ export default async (): Promise => { // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped testPathIgnorePatterns: ['/node_modules/'], + + // Run tests from one or more projects + projects: [ + { + displayName: 'node:unit', + testEnvironment: 'node', + testRegex: 'test/unit/((?!\\.browser).)*\\.spec\\.ts', + }, + { + displayName: 'node:integration', + testEnvironment: 'node', + testRegex: 'test/integration/((?!\\.browser).)*\\.spec\\.ts', + globalSetup: '/test/test-setup.ts' + }, + ] as unknown[] as string[], // bad types } } diff --git a/package-lock.json b/package-lock.json index d96e144..18814b4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,6 @@ "version": "1.0.3", "license": "BSD-3-Clause", "dependencies": { - "get-random-values": "^1.2.2", "js-sha3": "^0.8.0" }, "devDependencies": { @@ -20,7 +19,9 @@ "@babel/preset-typescript": "^7.14.5", "@ethersphere/bee-js": "^1.0.0", "@jest/types": "^27.0.6", + "@types/deep-equal": "^1.0.1", "@types/jest": "^26.0.24", + "@types/randombytes": "^2.0.0", "@types/terser-webpack-plugin": "^5.0.4", "@types/webpack-bundle-analyzer": "^4.4.1", "@typescript-eslint/eslint-plugin": "^4.28.3", @@ -2224,6 +2225,12 @@ "@babel/types": "^7.3.0" } }, + "node_modules/@types/deep-equal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@types/deep-equal/-/deep-equal-1.0.1.tgz", + "integrity": "sha512-mMUu4nWHLBlHtxXY17Fg6+ucS/MnndyOWyOe7MmwkoMYxvfQU2ajtRaEvqSUv+aVkMqH/C0NCI8UoVfRNQ10yg==", + "dev": true + }, "node_modules/@types/eslint": { "version": "7.2.14", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-7.2.14.tgz", @@ -2384,6 +2391,15 @@ "integrity": "sha512-eI5Yrz3Qv4KPUa/nSIAi0h+qX0XyewOliug5F2QAtuRg6Kjg6jfmxe1GIwoIRhZspD1A0RP8ANrPwvEXXtRFog==", "dev": true }, + "node_modules/@types/randombytes": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/randombytes/-/randombytes-2.0.0.tgz", + "integrity": "sha512-bz8PhAVlwN72vqefzxa14DKNT8jK/mV66CSjwdVQM/k3Th3EPKfUtdMniwZgMedQTFuywAsfjnZsg+pEnltaMA==", + "dev": true, + "dependencies": { + "@types/node": 
"*" + } + }, "node_modules/@types/stack-utils": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", @@ -3707,11 +3723,6 @@ "node": ">=6.0.0" } }, - "node_modules/dom-walk": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", - "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" - }, "node_modules/domexception": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/domexception/-/domexception-2.0.1.tgz", @@ -4599,17 +4610,6 @@ "node": ">=8.0.0" } }, - "node_modules/get-random-values": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-random-values/-/get-random-values-1.2.2.tgz", - "integrity": "sha512-lMyPjQyl0cNNdDf2oR+IQ/fM3itDvpoHy45Ymo2r0L1EjazeSl13SfbKZs7KtZ/3MDCeueiaJiuOEfKqRTsSgA==", - "dependencies": { - "global": "^4.4.0" - }, - "engines": { - "node": "10 || 12 || >=14" - } - }, "node_modules/get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", @@ -4660,15 +4660,6 @@ "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", "dev": true }, - "node_modules/global": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", - "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", - "dependencies": { - "min-document": "^2.19.0", - "process": "^0.11.10" - } - }, "node_modules/globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -6099,14 +6090,6 @@ "node": ">=6" } }, - "node_modules/min-document": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", - "integrity": "sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=", - "dependencies": { - "dom-walk": "^0.1.0" - } - }, "node_modules/minimalistic-assert": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", @@ -6500,14 +6483,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", @@ -9514,6 +9489,12 @@ "@babel/types": "^7.3.0" } }, + "@types/deep-equal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@types/deep-equal/-/deep-equal-1.0.1.tgz", + "integrity": "sha512-mMUu4nWHLBlHtxXY17Fg6+ucS/MnndyOWyOe7MmwkoMYxvfQU2ajtRaEvqSUv+aVkMqH/C0NCI8UoVfRNQ10yg==", + "dev": true + }, "@types/eslint": { "version": "7.2.14", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-7.2.14.tgz", @@ -9661,6 +9642,15 @@ "integrity": "sha512-eI5Yrz3Qv4KPUa/nSIAi0h+qX0XyewOliug5F2QAtuRg6Kjg6jfmxe1GIwoIRhZspD1A0RP8ANrPwvEXXtRFog==", "dev": true }, + "@types/randombytes": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/randombytes/-/randombytes-2.0.0.tgz", + "integrity": "sha512-bz8PhAVlwN72vqefzxa14DKNT8jK/mV66CSjwdVQM/k3Th3EPKfUtdMniwZgMedQTFuywAsfjnZsg+pEnltaMA==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, "@types/stack-utils": { "version": "2.0.1", "resolved": 
"https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", @@ -10692,11 +10682,6 @@ "esutils": "^2.0.2" } }, - "dom-walk": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", - "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==" - }, "domexception": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/domexception/-/domexception-2.0.1.tgz", @@ -11342,14 +11327,6 @@ "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", "dev": true }, - "get-random-values": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-random-values/-/get-random-values-1.2.2.tgz", - "integrity": "sha512-lMyPjQyl0cNNdDf2oR+IQ/fM3itDvpoHy45Ymo2r0L1EjazeSl13SfbKZs7KtZ/3MDCeueiaJiuOEfKqRTsSgA==", - "requires": { - "global": "^4.4.0" - } - }, "get-stream": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", @@ -11385,15 +11362,6 @@ "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", "dev": true }, - "global": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", - "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", - "requires": { - "min-document": "^2.19.0", - "process": "^0.11.10" - } - }, "globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -12499,14 +12467,6 @@ "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "dev": true }, - "min-document": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", - "integrity": "sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=", - "requires": { - "dom-walk": "^0.1.0" - } - }, "minimalistic-assert": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", @@ -12799,11 +12759,6 @@ } } }, - "process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=" - }, "progress": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", diff --git a/package.json b/package.json index 1f7b1bc..db713b3 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,9 @@ "lint": "eslint --fix \"src/**/*.ts\" && prettier --write \"src/**/*.ts\"", "lint:check": "eslint \"src/**/*.ts\" && prettier --check \"src/**/*.ts\"", "check:types": "tsc --project tsconfig.json --skipLibCheck", - "test": "jest --verbose --config=jest.config.ts" + "test": "jest --verbose --selectProjects=node:unit node:integration --config=jest.config.ts", + "test:unit": "jest --verbose --selectProjects=node:unit --config=jest.config.ts ", + "test:integration": "jest --verbose --selectProjects=node:integration --config=jest.config.ts" }, "keywords": [ "mantaray", @@ -23,7 +25,6 @@ ], "license": "BSD-3-Clause", "dependencies": { - "get-random-values": "^1.2.2", "js-sha3": "^0.8.0" }, "devDependencies": { @@ -34,7 +35,9 @@ "@babel/preset-typescript": "^7.14.5", "@ethersphere/bee-js": "^1.0.0", "@jest/types": "^27.0.6", + "@types/deep-equal": "^1.0.1", "@types/jest": "^26.0.24", + "@types/randombytes": "^2.0.0", "@types/terser-webpack-plugin": "^5.0.4", "@types/webpack-bundle-analyzer": "^4.4.1", 
"@typescript-eslint/eslint-plugin": "^4.28.3", diff --git a/src/index.ts b/src/index.ts index 749ff2a..fbe5189 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,15 +1,40 @@ -import { MantarayNode } from './node' -import { Bytes } from './types' -import { gen32Bytes } from './utils' +import { MantarayNode as MantarayNodeV0_2 } from './mantaray-v0_2' +import { MantarayNode as MantarayNodeV1 } from './mantaray-v1' +import type { Bytes, MarshalVersion } from './types' -/** On the returned Mantaray node you can set either its entry or add fork to it */ -export function initManifestNode(options?: { obfuscationKey?: Bytes<32> }): MantarayNode { - const manifestNode = new MantarayNode() - manifestNode.setObfuscationKey = options?.obfuscationKey || gen32Bytes() +export function initManifestNode(options?: { + obfuscationKey?: Bytes<32> + version?: Version +}): MantarayNode { + const obfuscationKey: Bytes<32> = options?.obfuscationKey || (new Uint8Array(32) as Bytes<32>) + const version: MarshalVersion = options?.version ? options!.version : '1.0' - return manifestNode + if (version === '0.2') { + const manifestNode0_2 = new MantarayNodeV0_2() + manifestNode0_2.setObfuscationKey = obfuscationKey + + return manifestNode0_2 as MantarayNode + } + + if (version === '1.0') { + const manifestNode1_0 = new MantarayNodeV1() + manifestNode1_0.obfuscationKey = obfuscationKey + + return manifestNode1_0 as MantarayNode + } + + throw new Error('Not implemented') } -export * from './node' +export type MantarayNode = Version extends '0.2' + ? MantarayNodeV0_2 + : Version extends '1.0' + ? MantarayNodeV1 + : Version extends undefined + ? MantarayNodeV0_2 | MantarayNodeV1 + : never + +export * as MantarayV0_2 from './mantaray-v0_2' export * from './types' export * as Utils from './utils' +export * as MantarayV1 from './mantaray-v1' diff --git a/src/node.ts b/src/mantaray-v0_2.ts similarity index 97% rename from src/node.ts rename to src/mantaray-v0_2.ts index 27b8e04..4bc1575 100644 --- a/src/node.ts +++ b/src/mantaray-v0_2.ts @@ -1,6 +1,6 @@ import { Bytes, MarshalVersion, MetadataMapping, NodeType, Reference, StorageLoader, StorageSaver } from './types' import { - checkReference, + assertReference, common, encryptDecrypt, equalBytes, @@ -8,7 +8,7 @@ import { flattenBytesArray, fromBigEndian, IndexBytes, - keccak256Hash, + serializeVersion, toBigEndianFromUint16, } from './utils' @@ -170,13 +170,13 @@ export class MantarayNode { /// Setters/getters public set setContentAddress(contentAddress: Reference) { - checkReference(contentAddress) + assertReference(contentAddress) this.contentAddress = contentAddress } public set setEntry(entry: Reference) { - checkReference(entry) + assertReference(entry) this.entry = entry @@ -512,12 +512,12 @@ export class MantarayNode { /// Forks const forkSerializations: Uint8Array[] = [] - index.forEach(byte => { + for (const byte of index.forEach()) { const fork = this.forks![byte] if (!fork) throw Error(`Fork indexing error: fork has not found under ${byte} index`) forkSerializations.push(fork.serialize()) - }) + } const bytes = new Uint8Array([ ...this.obfuscationKey!, @@ -574,7 +574,7 @@ export class MantarayNode { indexForks.setBytes = indexBytes offset += 32 - indexForks.forEach(byte => { + for (const byte of indexForks.forEach()) { let fork: MantarayFork if (data.length < offset + nodeForkSizes.nodeType) { @@ -608,7 +608,7 @@ export class MantarayNode { this.forks![byte] = fork offset += nodeForkSize - }) + } } else { throw Error('Wrong mantaray version') } @@ -655,17 +655,6 @@ 
export function checkForSeparator(node: MantarayNode): boolean { return false } -/** - * The hash length has to be 31 instead of 32 that comes from the keccak hash function - */ -function serializeVersion(version: MarshalVersion): Bytes<31> { - const versionName = 'mantaray' - const versionSeparator = ':' - const hashBytes = keccak256Hash(versionName + versionSeparator + version) - - return hashBytes.slice(0, 31) as Bytes<31> -} - function serializeReferenceLength(entry: Reference): Bytes<1> { const referenceLength = entry.length diff --git a/src/mantaray-v1.ts b/src/mantaray-v1.ts new file mode 100644 index 0000000..aa1b85e --- /dev/null +++ b/src/mantaray-v1.ts @@ -0,0 +1,752 @@ +import type { + Bytes, + MarshalVersion, + MetadataMapping, + Random32BytesFn, + Reference, + StorageLoader, + StorageSaver, +} from './types' +import { + assertMetadataMapping, + assertNonNegativeInteger, + assertReference, + common, + deserializeMetadata, + encryptDecrypt, + equalBytes, + flattenBytesArray, + IndexBytes, + isPrefixedBy, + null32Bytes, + serializeMedata, + serializeMetadataInSegment, + serializeVersion, +} from './utils' + +type ForkMapping = { [key: number]: MantarayFork } +type RecursiveSaveReturnType = { reference: Reference; changed: boolean } + +const NODE_FORK_SIZES = { + prefixLength: 1, + /** Bytes length before `reference` */ + preReference: 32, + prefixMax: 31, + // Mantaray reference is either 32 or 64 bytes long +} as const + +const NODE_HEADER_SIZES = { + obfuscationKey: 32, + versionHash: 31, + /** 3 bit flags + 5 bit forkMetadataSegmentSize */ + nodeFeatures: 1, + get full(): number { + return NODE_HEADER_SIZES.obfuscationKey + NODE_HEADER_SIZES.versionHash + NODE_HEADER_SIZES.nodeFeatures + }, + // entry is either 32 or 64 bytes long +} as const + +/** Maximum segmentbyte size for fork metadata */ +const MAX_FORK_METADATA_SEGMENT_SIZE = 2 ^ 5 + +/// ERRORS + +class NotFoundError extends Error { + constructor(remainingPathBytes: Uint8Array, checkedPrefixBytes?: Uint8Array) { + const remainingPath = new TextDecoder().decode(remainingPathBytes) + const prefixInfo = checkedPrefixBytes + ? `Prefix on lookup: ${new TextDecoder().decode(checkedPrefixBytes)}` + : 'No fork on the level' + super(`Path has not found in the manifest. Remaining path on lookup: ${remainingPath}. ${prefixInfo}`) + } +} + +class EmptyPathError extends Error { + constructor() { + super('Empty path') + } +} + +class UndefinedField extends Error { + constructor(field: string) { + super(`"${field}" field is not initialized.`) + } +} + +class RandomBytesFnUndefined extends Error { + constructor() { + super( + 'Obfuscation key generator is not passed for `addFork` method.\n' + + `it is required because its parent node has obfuscation key.`, + ) + } +} + +// LOGIC + +export class MantarayFork { + /** + * @param prefix the non-branching part of the subpath + * @param node in memory structure that represents the Node + */ + constructor(public prefix: Uint8Array, public node: MantarayNode) {} + + /** + * The obfuscation on the data happens in node serialisation + * @forkMetadata metadata about the node on the fork level. 
The segmentsize is the reserved byte length devided by 32 + */ + public serialize(segmentSize = 0): Uint8Array { + const metadata = this.node.forkMetadata + const prefixLengthBytes = new Uint8Array(1) + prefixLengthBytes[0] = this.prefix.length // on addFork it is always trimmed under `prefixMax` + + if (this.node.isContinuousNode && this.prefix.length === 31) { + prefixLengthBytes[0] += 1 // continuous node rule + } + + const prefixBytes = new Uint8Array(NODE_FORK_SIZES.prefixMax) + prefixBytes.set(this.prefix) + + const mantarayReference: Reference | undefined = this.node.contentAddress + + if (!mantarayReference) throw Error('cannot serialize MantarayFork because it does not have contentAddress') + + const data = new Uint8Array([...prefixLengthBytes, ...prefixBytes, ...mantarayReference]) + + if (segmentSize > 0) { + const metadataBytes = serializeMetadataInSegment(metadata, segmentSize) + + return new Uint8Array([...data, ...metadataBytes]) + } + + return data + } + + public static deserialize(data: Uint8Array, encEntry: boolean): MantarayFork { + let prefixLength = data[0] + let continuousNode = false + + if (prefixLength > NODE_FORK_SIZES.prefixMax) { + prefixLength = 31 + continuousNode = true + } + + const prefix = data.slice(NODE_FORK_SIZES.prefixLength, NODE_FORK_SIZES.prefixLength + prefixLength) + const node = new MantarayNode() + node.isContinuousNode = continuousNode + const fork = new MantarayFork(prefix, node) + const entryLength = encEntry ? 64 : 32 + // on deserialisation the content address stores the fork's mantaray node address + const contentAddress = data.slice(NODE_FORK_SIZES.preReference, NODE_FORK_SIZES.preReference + entryLength) as + | Bytes<32> + | Bytes<64> + const metadataBytes = data.slice(NODE_FORK_SIZES.preReference + entryLength) + + if (metadataBytes.length > 0) { + node.forkMetadata = deserializeMetadata(metadataBytes) + } + + // contentAddress set always at the end of the deserialisation because the dirty flag is based on this as well + node.contentAddress = contentAddress + + return fork + } +} + +export class MantarayNode { + private _obfuscationKey: Bytes<32> + /** whether the node has entry field */ + private _hasEntry: boolean + /** the entry field is an encrypted reference and 64 bytes long */ + private _encEntry: boolean + /** whether the node has additional forks or not */ + private _isEdge: boolean + /** reference of a loaded manifest node. if undefined, the node can be handled as `dirty` */ + private _contentAddress?: Reference + /** reference of a content that the manifest refers to */ + private _entry?: Reference + private _nodeMetadata?: MetadataMapping + /** + * Metadata about the node sersialized on its parent level. + * + * It is handled here instead of `MantarayFork`, because of trie structure rearrangements on `addFork` + */ + private _forkMetadata?: MetadataMapping + /** this value * the segment size (32) gives the reserved bytesize for metadata under each forkdata */ + private _forkMetadataSegmentSize: number + /** + * Prefix is limited to 31 bytes. When it overflows a new `MantarayNode` is created to store the overflowing prefix. + * This new mantaray node will act as a continuous node because the childnode prefix is too long + */ + public isContinuousNode: boolean + /** Forks of the manifest. 
*/ + public forks?: ForkMapping + + public constructor() { + this._hasEntry = false + this._encEntry = false + this._isEdge = false + this.isContinuousNode = false + this._forkMetadataSegmentSize = 0 + this._obfuscationKey = new Uint8Array(32) as Bytes<32> + } + + /// Setters/getters + + public set contentAddress(contentAddress: Reference | undefined) { + if (!contentAddress) { + this._contentAddress = undefined + + return + } + assertReference(contentAddress) + + this._contentAddress = contentAddress + } + + public get contentAddress(): Reference | undefined { + return this._contentAddress + } + + public set entry(entry: Reference | undefined) { + if (!entry) { + this._hasEntry = false + + return + } + assertReference(entry) + + this._entry = entry + this._hasEntry = true + + if (entry.length === 64) this._encEntry = true + this.makeDirty() + } + + public get entry(): Reference | undefined { + return this._entry + } + + public set obfuscationKey(obfuscationKey: Bytes<32>) { + if (!(obfuscationKey instanceof Uint8Array)) { + throw new Error('Given obfuscationKey is not an Uint8Array instance.') + } + + if (obfuscationKey.length !== 32) { + throw new Error(`Wrong obfuscationKey length. Entry only can be 32 length in bytes`) + } + + this._obfuscationKey = obfuscationKey + this.makeDirty() + } + + public get obfuscationKey(): Bytes<32> { + return this._obfuscationKey + } + + public set nodeMetadata(metadata: MetadataMapping | undefined) { + if (!metadata) { + this._nodeMetadata = undefined + + return + } + assertMetadataMapping(metadata) + this._nodeMetadata = metadata + this.makeDirty() + } + + public get nodeMetadata(): MetadataMapping | undefined { + return this._nodeMetadata + } + + public set forkMetadata(metadata: MetadataMapping | undefined) { + if (!metadata) { + this._nodeMetadata = undefined + + return + } + assertMetadataMapping(metadata) + this._forkMetadata = metadata + this.makeDirty() + } + + public get forkMetadata(): MetadataMapping | undefined { + return this._forkMetadata + } + + public set forkMetadataSegmentSize(value: number) { + assertNonNegativeInteger(value) + + if (value > 31) throw new Error(`forkMetadataSegmentSize is greater than 31. 
Got: ${value}`) + this._forkMetadataSegmentSize = value + } + + public get forkMetadataSegmentSize(): number { + return this._forkMetadataSegmentSize + } + + public get isEdge(): boolean { + return this._isEdge + } + + public get hasEntry(): boolean { + return this._hasEntry + } + + public get encEntry(): boolean { + return this._encEntry + } + + public get metadata(): MetadataMapping | undefined { + if (!this._forkMetadata && !this._nodeMetadata) return undefined + + return { + ...this.nodeMetadata, + ...this._forkMetadata, + } + } + + /// Node type related functions + /// dirty flag is not necessary to be set + + public isValueType(): boolean { + return this._hasEntry + } + + /** + * The node either has metadata on node level or fork level + * for forkMetadata parent node has to be fetched + */ + public isWithMetadataType(): boolean { + return Boolean(this._forkMetadata) || Boolean(this._nodeMetadata) + } + + public addFork( + path: Uint8Array, + attributes?: { + entry?: Reference + nodeMetadata?: MetadataMapping + forkMetadata?: MetadataMapping + obfuscationKeyGenerator?: Random32BytesFn + }, + ): void { + const entry: Reference | undefined = attributes?.entry + const nodeMetadata: MetadataMapping | undefined = attributes?.nodeMetadata + const forkMetadata: MetadataMapping | undefined = attributes?.forkMetadata + const obfuscationKeyGenerator: Random32BytesFn | undefined = attributes?.obfuscationKeyGenerator + + // refers to the root node of the trie + if (path.length === 0) { + if (entry) this.entry = entry + + this.nodeMetadata = nodeMetadata + this.forkMetadata = forkMetadata + + return + } + + if (this.isDirty() && !this.forks) this.forks = {} + + if (!this.forks) throw Error(`Fork mapping is not defined in the manifest`) + + const fork = this.forks[path[0]] + + if (!fork) { + const newNode = new MantarayNode() + + if (!equalBytes(this._obfuscationKey, null32Bytes)) { + if (!obfuscationKeyGenerator) { + throw new RandomBytesFnUndefined() + } + newNode.obfuscationKey = obfuscationKeyGenerator() + } + + // Continuous node + if (path.length > NODE_FORK_SIZES.prefixMax) { + const prefix = path.slice(0, NODE_FORK_SIZES.prefixMax) + const rest = path.slice(NODE_FORK_SIZES.prefixMax) + newNode.addFork(rest, attributes) + newNode.isContinuousNode = true + this.forks[path[0]] = new MantarayFork(prefix, newNode) + this._isEdge = true + this.makeDirty() + + return + } + + // create non-continuous node + if (entry) newNode.entry = entry + + newNode.forkMetadata = forkMetadata + newNode.nodeMetadata = nodeMetadata + + this.forks[path[0]] = new MantarayFork(path, newNode) + this.makeDirty() + this._isEdge = true + + return + } + + /// Case when there is an existing fork for the given (sub)path + + const commonPath = common(fork.prefix, path) + /** restPath of the existing fork's path that differs from the new path */ + const restPath = fork.prefix.slice(commonPath.length) + let newNode = fork.node + + // create new parent node where the path starts to differ in the current node's prefix + // this parent node will have two children: the current node and the new node with the given path + if (restPath.length > 0) { + // create new node for the common path + newNode = new MantarayNode() + + if (equalBytes(this._obfuscationKey, null32Bytes)) { + newNode.obfuscationKey = new Uint8Array(32) as Bytes<32> + } else { + if (!obfuscationKeyGenerator) { + throw new RandomBytesFnUndefined() + } + newNode.obfuscationKey = obfuscationKeyGenerator() + } + newNode.forks = {} + const newFork = new 
MantarayFork(restPath, fork.node) + + if (fork.node.isContinuousNode) { + handleTrimmedContinuousFork(newFork) + } + newNode.forks[restPath[0]] = newFork // copy old parent node to its remaining path + newNode._isEdge = true + } + + // NOTE: special case on edge split + // newNode will be the common path edge node + // newNode's prefix is a subset of the given `path`, here the desired fork will be added with the truncated path + newNode.addFork(path.slice(commonPath.length), attributes) + this.forks[path[0]] = new MantarayFork(commonPath, newNode) + this._isEdge = true + + this.makeDirty() + } + + /** + * Gives back a MantarayFork under the given path + * + * @param path valid path within the MantarayNode + * @returns MantarayFork with the last unique prefix and its node + * @throws error if there is no node under the given path + */ + public getForkAtPath(path: Uint8Array): MantarayFork { + if (path.length === 0) throw EmptyPathError + + if (!this.forks) throw Error(`Fork mapping is not defined in the manifest`) + + const fork = this.forks[path[0]] + + if (!fork) throw new NotFoundError(path) + + if (!isPrefixedBy(path, fork.prefix)) throw new NotFoundError(path, fork.prefix) + + const rest = path.slice(fork.prefix.length) + + if (rest.length === 0) return fork + + return fork.node.getForkAtPath(rest) + } + + /** + * Removes a path from the node + * + * @param path Uint8Array of the path of the node intended to remove + */ + public removePath(path: Uint8Array): void { + if (path.length === 0) throw EmptyPathError + + if (!this.forks) throw Error(`Fork mapping is not defined in the manifest`) + + const fork = this.forks[path[0]] + + if (!fork) throw new NotFoundError(path) + + if (!isPrefixedBy(path, fork.prefix)) throw new NotFoundError(path, fork.prefix) + + const rest = path.slice(fork.prefix.length) + + if (rest.length === 0) { + // full path matched + this.makeDirty() + delete this.forks[path[0]] + + return + } + + fork.node.removePath(rest) + } + + public async load(storageLoader: StorageLoader, reference: Reference): Promise { + if (!reference) throw Error('Reference is undefined at manifest load') + + const data = await storageLoader(reference) + this.deserialize(data) + + this.contentAddress = reference + } + + /** + * Saves dirty flagged ManifestNodes and its forks recursively + * @returns Reference of the top manifest node. 
+ */ + public async save(storageSaver: StorageSaver): Promise { + const { reference } = await this.recursiveSave(storageSaver) + + return reference + } + + public isDirty(): boolean { + return this._contentAddress === undefined + } + + public makeDirty(): void { + this._contentAddress = undefined + } + + public serialize(): Uint8Array { + const obfuscationKey = this._obfuscationKey || new Uint8Array(32) + + if (!this.forks) { + if (!this._entry) throw new UndefinedField('entry') + this.forks = {} //if there were no forks initialized it is not indended to be + } + + /// Header + const version: MarshalVersion = '1.0' + const versionBytes: Bytes<31> = serializeVersion(version) + + /// Entry + const entry = this._entry || new Uint8Array() + + /// Forks and ForkIndexBytes + + /// ForksIndexBytes + let indexBytes: Bytes<32> | Bytes<0> = new Uint8Array() as Bytes<0> + const forkSerializations: Uint8Array[] = [] + + if (this._isEdge) { + const index = new IndexBytes() + for (const [forkIndex, fork] of Object.entries(this.forks)) { + index.setByte(Number(forkIndex)) + + if (fork.node.forkMetadata) { + // maximum selection among forkMetadata + const metadataBytes = serializeMedata(fork.node.forkMetadata) + const forkMetadataSegmentSize = Math.ceil(metadataBytes.length / 32) + + if (forkMetadataSegmentSize > MAX_FORK_METADATA_SEGMENT_SIZE) { + throw new Error( + `metadata size ${forkMetadataSegmentSize} is bigger than the limit ${MAX_FORK_METADATA_SEGMENT_SIZE}.`, + ) + } + + if (forkMetadataSegmentSize > this.forkMetadataSegmentSize) { + this.forkMetadataSegmentSize = forkMetadataSegmentSize + } + } + } + indexBytes = index.getBytes + + /// Forks + for (const byte of index.forEach()) { + const fork = this.forks![byte] + + if (!fork) throw Error(`Fork indexing error: fork has not found under ${byte} index`) + forkSerializations.push(fork.serialize(this.forkMetadataSegmentSize)) + } + } + + const nodeFeatures: Bytes<1> = this.serializeFeatures() + + /// NodeMetadata + let nodeMetadataBytes = new Uint8Array(0) + + if (this._nodeMetadata) { + nodeMetadataBytes = serializeMedata(this._nodeMetadata) + } + + const bytes = new Uint8Array([ + ...obfuscationKey, + ...versionBytes, + ...nodeFeatures, + ...entry, + ...indexBytes, + ...flattenBytesArray(forkSerializations), + ...nodeMetadataBytes, + ]) + + /// Encryption + /// perform XOR encryption on bytes after obfuscation key + encryptDecrypt(obfuscationKey, bytes, obfuscationKey.length) + + return bytes + } + + public deserialize(data: Uint8Array): void { + /// Header + const fullNodeHeaderSize = NODE_HEADER_SIZES.full + + if (data.length < fullNodeHeaderSize) throw Error('The serialised input is too short') + + this._obfuscationKey = new Uint8Array(data.slice(0, NODE_HEADER_SIZES.obfuscationKey)) as Bytes<32> + // perform XOR decryption on bytes after obfuscation key + encryptDecrypt(this._obfuscationKey, data, this._obfuscationKey.length) + + const versionHash = data.slice( + NODE_HEADER_SIZES.obfuscationKey, + NODE_HEADER_SIZES.obfuscationKey + NODE_HEADER_SIZES.versionHash, + ) + + if (!equalBytes(versionHash, serializeVersion('1.0'))) { + throw new Error('The data is not Mantaray 1.0') + } + + const nodeFeaturesByte = data[fullNodeHeaderSize - 1] + this.deserializeFeatures(nodeFeaturesByte) + + /// Entry + let refBytesSize = 0 + + if (this._hasEntry) { + if (this._encEntry) { + refBytesSize = 64 + } else { + refBytesSize = 32 + } + this.entry = data.slice(fullNodeHeaderSize, fullNodeHeaderSize + refBytesSize) as Reference + } + let offset = 
fullNodeHeaderSize + refBytesSize + + /// Fork + if (this._isEdge) { + /// Fork Bytes index mapping + const indexBytes = data.slice(offset, offset + 32) as Bytes<32> + const indexForks = new IndexBytes() + indexForks.setBytes = indexBytes + offset += 32 + + /// Forks + this.forks = {} + const forkSize = NODE_FORK_SIZES.preReference + (this._encEntry ? 64 : 32) + this._forkMetadataSegmentSize * 32 + for (const byte of indexForks.forEach()) { + if (data.length < offset + forkSize) { + throw Error(`There is not enough size to read fork data at offset ${offset}`) + } + + const forkBytes = data.slice(offset, offset + forkSize) + const fork = MantarayFork.deserialize(forkBytes, this._encEntry) + + this.forks![byte] = fork + + offset += forkSize + } + } + + /// NodeMetadata + const metadataBytes = data.slice(offset) + + if (metadataBytes.length > 0) { + this._nodeMetadata = deserializeMetadata(metadataBytes) + } + } + + private serializeFeatures(): Bytes<1> { + if (this._encEntry && !this._hasEntry) { + throw new Error('encEntry is true when hasEntry is false at serialisation') + } + + let nodeFeautes = this._forkMetadataSegmentSize + // add flags + nodeFeautes = nodeFeautes << 1 + nodeFeautes += this._isEdge ? 1 : 0 + nodeFeautes = nodeFeautes << 1 + nodeFeautes += this._encEntry ? 1 : 0 + nodeFeautes = nodeFeautes << 1 + nodeFeautes += this._hasEntry ? 1 : 0 + + const bytes = new Uint8Array(1) as Bytes<1> + bytes[0] = nodeFeautes + + return bytes + } + + private deserializeFeatures(nodeFeaturesByte: number) { + // deserialize flags + this._hasEntry = nodeFeaturesByte % 2 === 1 + nodeFeaturesByte = nodeFeaturesByte >> 1 + this._encEntry = nodeFeaturesByte % 2 === 1 + nodeFeaturesByte = nodeFeaturesByte >> 1 + this._isEdge = nodeFeaturesByte % 2 === 1 + nodeFeaturesByte = nodeFeaturesByte >> 1 + + // deserialize segmentsize + this.forkMetadataSegmentSize = nodeFeaturesByte + } + + private async recursiveSave(storageSaver: StorageSaver): Promise { + // save forks first recursively + const savePromises: Promise[] = [] + + if (!this.forks) this.forks = {} // there were no intention to define fork(s) + for (const fork of Object.values(this.forks)) { + savePromises.push(fork.node.recursiveSave(storageSaver)) + } + const savedReturns = await Promise.all(savePromises) + + if (this._contentAddress && savedReturns.every(v => !v.changed)) { + return { reference: this._contentAddress, changed: false } + } + + // save the actual manifest as well + const data = this.serialize() + const reference = await storageSaver(data) + + this.contentAddress = reference + + return { reference, changed: true } + } +} + +/** loads all nodes recursively */ +export async function loadAllNodes(storageLoader: StorageLoader, node: MantarayNode): Promise { + if (!node.forks) return + + for (const fork of Object.values(node.forks)) { + if (fork.node.contentAddress) await fork.node.load(storageLoader, fork.node.contentAddress) + await loadAllNodes(storageLoader, fork.node) + } +} + +/** + * Merge the given continuous fork with its only child + * Used for tree structure optimalisation on continuous nodes + */ +function handleTrimmedContinuousFork(fork: MantarayFork): void { + const forkKeys = Object.keys(fork.node.forks || {}) + + if (!fork.node.isContinuousNode || forkKeys.length !== 1) { + throw new Error( + 'The given fork is not a valid continuous node\n' + + `\tcontinuous node flag: ${fork.node.isContinuousNode}\n` + + `\tforkeys: ${forkKeys}`, + ) + } + + const childFork = fork.node.forks![Number(forkKeys[0])] + const 
commonPrefixLength = fork.prefix.length + childFork.prefix.length + + if (commonPrefixLength < 31) { + fork.node = childFork.node + fork.prefix = new Uint8Array([...fork.prefix, ...childFork.prefix]) + } else { + const remainingPrefixBytes = 31 - fork.prefix.length + fork.prefix = new Uint8Array([...fork.prefix, ...childFork.prefix.slice(0, remainingPrefixBytes)]) + childFork.prefix = new Uint8Array([...childFork.prefix.slice(remainingPrefixBytes)]) + } +} diff --git a/src/types/index.ts b/src/types/index.ts index 36c7ab4..0405b4c 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -1,4 +1,4 @@ -export const marshalVersionValues = ['0.1', '0.2'] as const +export const marshalVersionValues = ['0.1', '0.2', '1.0'] as const export type MarshalVersion = typeof marshalVersionValues[number] @@ -16,7 +16,7 @@ export enum NodeType { mask = 255, } -export type MetadataMapping = { [key: string]: string } +export type MetadataMapping = { [key: string]: any } export type StorageLoader = (reference: Reference) => Promise @@ -26,3 +26,5 @@ export type StorageHandler = { load: StorageLoader save: StorageSaver } + +export type Random32BytesFn = () => Bytes<32> diff --git a/src/utils.ts b/src/utils.ts index db7354c..ed1c761 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,9 +1,14 @@ -import getRandomValues from 'get-random-values' import type { Message } from 'js-sha3' import { keccak256 } from 'js-sha3' -import { Bytes, Reference } from './types' +import { Bytes, MarshalVersion, MetadataMapping, Reference } from './types' +import { Utils } from '@ethersphere/bee-js' -export function checkReference(ref: Reference): void | never { +export const { hexToBytes } = Utils.Hex + +/** only for comparisation. For assigment always create new uint8array! */ +export const null32Bytes = new Uint8Array(32) + +export function assertReference(ref: unknown): asserts ref is Reference { if (!(ref instanceof Uint8Array)) { throw new Error('Given referennce is not an Uint8Array instance.') } @@ -13,6 +18,26 @@ export function checkReference(ref: Reference): void | never { } } +function isMetadataMapping(metadata: unknown): metadata is MetadataMapping { + return typeof metadata === 'object' && !Array.isArray(metadata) +} + +export function assertMetadataMapping(metadata: unknown): asserts metadata is MetadataMapping { + if (!isMetadataMapping(metadata)) { + throw new Error('given metadata is not a valid metadata object for Mantaray serialisation') + } +} + +function isNonNegativeInteger(value: unknown): value is number { + return typeof value === 'number' && value >= 0 && Number.isInteger(value) +} + +export function assertNonNegativeInteger(value: unknown): asserts value is number { + if (!isNonNegativeInteger(value)) { + throw new Error(`Given value ${value} is not a non negative integer`) + } +} + export function checkBytes(bytes: unknown, length: number): asserts bytes is Bytes { if (!(bytes instanceof Uint8Array)) throw Error('Cannot set given bytes, because is not an Uint8Array type') @@ -22,13 +47,12 @@ export function checkBytes(bytes: unknown, length: number } /** - * Finds starting index `searchFor` in `element` Uin8Arrays + * Gives back on which index of `element` starts the `searchFor` Uint8Array. 
+ * `searchFor` element has to be included in `element`, otherwise it returns -1 * - * If `searchFor` is not found in `element` it returns -1 - * - * @param element - * @param searchFor - * @returns starting index of `searchFor` in `element` + * @param element The byte array in which the function will search for `searchFor` :) + * @param searchFor The byte array that `element` should include. + * @returns the index of `element` where `searchFor` starts. */ export function findIndexOfArray(element: Uint8Array, searchFor: Uint8Array): number { for (let i = 0; i <= element.length - searchFor.length; i++) { @@ -43,6 +67,25 @@ export function findIndexOfArray(element: Uint8Array, searchFor: Uint8Array): nu return -1 } +/** + * Checks whether element is prefixed by the 2nd byte array parameter. + * + * @param element The byte array in which the function will search for prefix + * @param prefix The byte array that `element` should start with. + * @returns whether the element starts with the given prefix or not. + */ +export function isPrefixedBy(element: Uint8Array, prefix: Uint8Array): boolean { + if (element.length < prefix.length) { + return false + } + + for (let i = 0; i < prefix.length; i++) { + if (element[i] !== prefix[i]) return false + } + + return true +} + /** Overwrites `a` bytearrays elements with elements of `b` starts from `i` */ export function overwriteBytes(a: Uint8Array, b: Uint8Array, i = 0): void { if (a.length < b.length + i) { @@ -131,12 +174,6 @@ export function toBigEndianFromUint16(value: number): Bytes<2> { return new Uint8Array([value >> 8, value]) as Bytes<2> } -export function gen32Bytes(): Bytes<32> { - const bytes = new Uint8Array(32) - - return getRandomValues(bytes) as Bytes<32> -} - /** It returns the common bytes of the two given byte arrays until the first byte difference */ export function common(a: Uint8Array, b: Uint8Array): Uint8Array { let c = new Uint8Array(0) @@ -184,11 +221,66 @@ export class IndexBytes { } /** Iterates through on the indexed byte values */ - public forEach(hook: (byte: number) => void): void { + public *forEach(): Generator { for (let i = 0; i <= 255; i++) { if (this.checkBytePresent(i)) { - hook(i) + yield i } } } } + +/** + * The hash length has to be 31 instead of 32 that comes from the keccak hash function + */ +export function serializeVersion(version: MarshalVersion): Bytes<31> { + const versionName = 'mantaray' + const versionSeparator = ':' + const hashBytes = keccak256Hash(versionName + versionSeparator + version) + + return hashBytes.slice(0, 31) as Bytes<31> +} + +export function serializeMedata(metadata: MetadataMapping): Uint8Array { + const jsonString = JSON.stringify(metadata) + + return new TextEncoder().encode(jsonString) +} + +/** Returns segment padded stringified JSON byte array */ +export function serializeMetadataInSegment(metadata: MetadataMapping | undefined, segmentSize: number): Uint8Array { + if (!metadata) { + const bytes = new Uint8Array(segmentSize * 32) + bytes.fill(32) // space padding + + return bytes + } + + const jsonString = JSON.stringify(metadata) + const jsonData = new TextEncoder().encode(jsonString) + const remainingSegmentBytes = segmentSize * 32 - jsonData.length + + if (remainingSegmentBytes < 0) { + throw new Error( + `serialized metadata does not fit into the reserved byte size for forkMetadata (metadata size ${ + jsonData.length + } > reserved metadata size ${segmentSize * 32})`, + ) + } + + const paddingBytes = new Uint8Array(remainingSegmentBytes) + paddingBytes.fill(32) // space 
padding + + return new Uint8Array([...jsonData, ...paddingBytes]) +} + +/** If the JSON deserialisation of the data is not succesful, it will give back undefined */ +export function deserializeMetadata(data: Uint8Array): MetadataMapping | undefined { + try { + const jsonString = new TextDecoder().decode(data).trimEnd() + + return JSON.parse(jsonString) + } catch (e) { + return undefined + } +} diff --git a/test/index.spec.ts b/test/index.spec.ts deleted file mode 100644 index 8f5e877..0000000 --- a/test/index.spec.ts +++ /dev/null @@ -1,155 +0,0 @@ -import { Bee, Utils } from '@ethersphere/bee-js' -import FS from 'fs' -import { join } from 'path' -import { MantarayNode } from '../src' -import { loadAllNodes } from '../src/node' -import type { Reference } from '../src/types' -import { commonMatchers, getSampleMantarayNode } from './utils' - -commonMatchers() -const beeUrl = process.env.BEE_API_URL || 'http://localhost:1633' -const bee = new Bee(beeUrl) - -const hexToBytes = (hexString: string): Reference => { - return Utils.Hex.hexToBytes(hexString) -} - -const saveFunction = async (data: Uint8Array): Promise => { - const hexRef = await bee.uploadData(process.env.BEE_POSTAGE, data) - - return hexToBytes(hexRef) -} - -const loadFunction = async (address: Reference): Promise => { - return bee.downloadData(Utils.Hex.bytesToHex(address)) -} - -/** Uploads the testpage directory with bee-js and return back its root manifest data */ -const beeTestPageManifestData = async (): Promise => { - const contentHash = await bee.uploadFilesFromDirectory(process.env.BEE_POSTAGE, join(__dirname, 'testpage'), { - pin: true, - indexDocument: 'index.html', - }) - - return bee.downloadData(contentHash) //only download its manifest -} - -it('should serialize/deserialize the same as Bee', async () => { - const data = await beeTestPageManifestData() - const node = new MantarayNode() - node.deserialize(data) - await loadAllNodes(loadFunction, node) - const serialization = node.serialize() - // // expect(serialization).toBe(data) -> mantaray-js does not padding the json metadata - const nodeAgain = new MantarayNode() - nodeAgain.deserialize(serialization) - await loadAllNodes(loadFunction, nodeAgain) - expect(nodeAgain).toStrictEqual(node) -}) - -it('should construct manifests of testpage folder', async () => { - const data = await beeTestPageManifestData() - const node = new MantarayNode() - node.deserialize(data) - await loadAllNodes(loadFunction, node) - - const testPage = join(__dirname, 'testpage') - const indexHtmlBytes = FS.readFileSync(join(testPage, 'index.html')) - const imageBytes = FS.readFileSync(join(testPage, 'img', 'icon.png')) - const [indexReference, imageReference, textReference] = await Promise.all([ - bee.uploadData(process.env.BEE_POSTAGE, indexHtmlBytes), - bee.uploadData(process.env.BEE_POSTAGE, imageBytes), - bee.uploadData(process.env.BEE_POSTAGE, new Uint8Array([104, 97, 108, 105])), - ]) - const utf8ToBytes = (value: string): Uint8Array => { - return new TextEncoder().encode(value) - } - const iNode = new MantarayNode() - iNode.addFork(utf8ToBytes('index.html'), hexToBytes(indexReference), { - 'Content-Type': 'text/html; charset=utf-8', - Filename: 'index.html', - }) - iNode.addFork(utf8ToBytes('img/icon.png.txt'), hexToBytes(textReference), { - 'Content-Type': '', // FIXME: The bee node assigns empty string to Content Type in this case - Filename: 'icon.png.txt', - }) - iNode.addFork(utf8ToBytes('img/icon.png'), hexToBytes(imageReference), { - 'Content-Type': 'image/png', - Filename: 
'icon.png', - }) - iNode.addFork(utf8ToBytes('/'), new Uint8Array(32) as Reference, { - 'website-index-document': 'index.html', - }) - const iNodeRef = await iNode.save(saveFunction) - expect(Object.keys(iNode.forks)).toStrictEqual(Object.keys(node.forks)) - const marshal = iNode.serialize() - const iNodeAgain = new MantarayNode() - iNodeAgain.deserialize(marshal) - await loadAllNodes(loadFunction, iNodeAgain) - // check after serialization the object is same - expect(iNode).toBeEqualNode(iNodeAgain) - // check bee manifest is equal with the constructed one. - expect(iNode).toBeEqualNode(node) - // eslint-disable-next-line no-console - console.log('Constructed root manifest hash', Utils.Hex.bytesToHex(iNodeRef)) -}) - -it('should remove fork then upload it', async () => { - const sampleNode = getSampleMantarayNode() - const node = sampleNode.node - const path1 = sampleNode.paths[0] - const path2 = sampleNode.paths[1] - // save sample node - const refOriginal = await node.save(saveFunction) - /** node where the fork set will change */ - const getCheckNode = (): MantarayNode => { - return node.getForkAtPath(new TextEncoder().encode('path1/valami/')).node - } - const checkNode1 = getCheckNode() - const refCheckNode1 = checkNode1.getContentAddress - // current forks of node - expect(Object.keys(checkNode1.forks)).toStrictEqual([String(path1[13]), String(path2[13])]) - node.removePath(path2) - const refDeleted = await node.save(saveFunction) - // root reference should not remain the same - expect(refDeleted).not.toStrictEqual(refOriginal) - node.load(loadFunction, refDeleted) - // 'm' key of prefix table disappeared - const checkNode2 = getCheckNode() - expect(Object.keys(checkNode2.forks)).toStrictEqual([String(path1[13])]) - // reference should differ because the changed fork set - const refCheckNode2 = checkNode2.getContentAddress - expect(refCheckNode2).not.toStrictEqual(refCheckNode1) -}) - -it('should modify the tree and call save on the mantaray root then load it back correctly', async () => { - const data = await beeTestPageManifestData() - const node = new MantarayNode() - node.deserialize(data) - await loadAllNodes(loadFunction, node) - - // it modifies a node value and then 2 levels above a descendant node - const firstNode = node.forks[105].node - const descendantNode = firstNode.forks[109].node.forks[46].node - firstNode.setMetadata = { - ...firstNode.getMetadata, - additionalParam: 'first', - } - descendantNode.setMetadata = { - ...descendantNode.getMetadata, - additionalParam: 'second', - } - - const reference = await node.save(saveFunction) - const nodeAgain = new MantarayNode() - await nodeAgain.load(loadFunction, reference) - await loadAllNodes(loadFunction, nodeAgain) - const firstNodeAgain = nodeAgain.forks[105].node - const descendantNodeAgain = firstNodeAgain.forks[109].node.forks[46].node - - expect(firstNodeAgain.getMetadata).toStrictEqual(firstNode.getMetadata) - expect(firstNodeAgain.getMetadata['additionalParam']).toBe('first') - // fails if the save does not walk the whole tree - expect(descendantNodeAgain.getMetadata).toStrictEqual(descendantNode.getMetadata) - expect(descendantNodeAgain.getMetadata['additionalParam']).toBe('second') -}) diff --git a/test/integration/mantaray-0_2.spec.ts b/test/integration/mantaray-0_2.spec.ts new file mode 100644 index 0000000..be21a82 --- /dev/null +++ b/test/integration/mantaray-0_2.spec.ts @@ -0,0 +1,156 @@ +import { Bee, Utils } from '@ethersphere/bee-js' +import FS from 'fs' +import { join } from 'path' +import { loadAllNodes, 
+import type { Reference } from '../../src/types'
+import { commonMatchers, getSampleMantarayNode0_2 } from '../utils'
+
+commonMatchers()
+const beeUrl = process.env.BEE_API_URL || 'http://localhost:1633'
+const bee = new Bee(beeUrl)
+
+const hexToBytes = (hexString: string): Reference => {
+  return Utils.Hex.hexToBytes(hexString)
+}
+
+const saveFunction = async (data: Uint8Array): Promise<Reference> => {
+  const hexRef = await bee.uploadData(process.env.BEE_POSTAGE, data)
+
+  return hexToBytes(hexRef)
+}
+
+const loadFunction = async (address: Reference): Promise<Uint8Array> => {
+  return bee.downloadData(Utils.Hex.bytesToHex(address))
+}
+
+/** Uploads the testpage directory with bee-js and returns its root manifest data */
+const beeTestPageManifestData = async (): Promise<Uint8Array> => {
+  const contentHash = await bee.uploadFilesFromDirectory(process.env.BEE_POSTAGE, join(__dirname, 'testpage'), {
+    pin: true,
+    indexDocument: 'index.html',
+  })
+
+  return bee.downloadData(contentHash) // only download its manifest
+}
+
+describe('Mantaray 0.2 integration tests', () => {
+  it('should serialize/deserialize the same as Bee', async () => {
+    const data = await beeTestPageManifestData()
+    const node = new MantarayNode()
+    node.deserialize(data)
+    await loadAllNodes(loadFunction, node)
+    const serialization = node.serialize()
+    // expect(serialization).toBe(data) -> mantaray-js does not pad the JSON metadata
+    const nodeAgain = new MantarayNode()
+    nodeAgain.deserialize(serialization)
+    await loadAllNodes(loadFunction, nodeAgain)
+    expect(nodeAgain).toStrictEqual(node)
+  })
+
+  it('should construct manifests of testpage folder', async () => {
+    const data = await beeTestPageManifestData()
+    const node = new MantarayNode()
+    node.deserialize(data)
+    await loadAllNodes(loadFunction, node)
+
+    const testPage = join(__dirname, 'testpage')
+    const indexHtmlBytes = FS.readFileSync(join(testPage, 'index.html'))
+    const imageBytes = FS.readFileSync(join(testPage, 'img', 'icon.png'))
+    const [indexReference, imageReference, textReference] = await Promise.all([
+      bee.uploadData(process.env.BEE_POSTAGE, indexHtmlBytes),
+      bee.uploadData(process.env.BEE_POSTAGE, imageBytes),
+      bee.uploadData(process.env.BEE_POSTAGE, new Uint8Array([104, 97, 108, 105])),
+    ])
+    const utf8ToBytes = (value: string): Uint8Array => {
+      return new TextEncoder().encode(value)
+    }
+    const iNode = new MantarayNode()
+    iNode.addFork(utf8ToBytes('index.html'), hexToBytes(indexReference), {
+      'Content-Type': 'text/html; charset=utf-8',
+      Filename: 'index.html',
+    })
+    iNode.addFork(utf8ToBytes('img/icon.png.txt'), hexToBytes(textReference), {
+      'Content-Type': '', // FIXME: The bee node assigns an empty string to Content-Type in this case
+      Filename: 'icon.png.txt',
+    })
+    iNode.addFork(utf8ToBytes('img/icon.png'), hexToBytes(imageReference), {
+      'Content-Type': 'image/png',
+      Filename: 'icon.png',
+    })
+    iNode.addFork(utf8ToBytes('/'), new Uint8Array(32) as Reference, {
+      'website-index-document': 'index.html',
+    })
+    const iNodeRef = await iNode.save(saveFunction)
+    expect(Object.keys(iNode.forks)).toStrictEqual(Object.keys(node.forks))
+    const marshal = iNode.serialize()
+    const iNodeAgain = new MantarayNode()
+    iNodeAgain.deserialize(marshal)
+    await loadAllNodes(loadFunction, iNodeAgain)
+    // check that the object is the same after serialization
+    expect(iNode).toBeEqualNode0_2(iNodeAgain)
+    // check that the bee manifest is equal to the constructed one
+    expect(iNode).toBeEqualNode0_2(node)
+    // eslint-disable-next-line no-console
+    console.log('Constructed root manifest hash', Utils.Hex.bytesToHex(iNodeRef))
+  })
+
+  it('should remove fork then upload it', async () => {
+    const sampleNode = getSampleMantarayNode0_2()
+    const node = sampleNode.node
+    const path1 = sampleNode.paths[0]
+    const path2 = sampleNode.paths[1]
+    // save sample node
+    const refOriginal = await node.save(saveFunction)
+    /** node where the fork set will change */
+    const getCheckNode = (): MantarayNode => {
+      return node.getForkAtPath(new TextEncoder().encode('path1/valami/')).node
+    }
+    const checkNode1 = getCheckNode()
+    const refCheckNode1 = checkNode1.getContentAddress
+    // current forks of node
+    expect(Object.keys(checkNode1.forks)).toStrictEqual([String(path1[13]), String(path2[13])])
+    node.removePath(path2)
+    const refDeleted = await node.save(saveFunction)
+    // root reference should not remain the same
+    expect(refDeleted).not.toStrictEqual(refOriginal)
+    node.load(loadFunction, refDeleted)
+    // 'm' key of prefix table disappeared
+    const checkNode2 = getCheckNode()
+    expect(Object.keys(checkNode2.forks)).toStrictEqual([String(path1[13])])
+    // the reference should differ because of the changed fork set
+    const refCheckNode2 = checkNode2.getContentAddress
+    expect(refCheckNode2).not.toStrictEqual(refCheckNode1)
+  })
+
+  it('should modify the tree and call save on the mantaray root then load it back correctly', async () => {
+    const data = await beeTestPageManifestData()
+    const node = new MantarayNode()
+    node.deserialize(data)
+    await loadAllNodes(loadFunction, node)
+
+    // modify a node value and, two levels below it, a descendant node
+    const firstNode = node.forks[105].node
+    const descendantNode = firstNode.forks[109].node.forks[46].node
+    firstNode.setMetadata = {
+      ...firstNode.getMetadata,
+      additionalParam: 'first',
+    }
+    descendantNode.setMetadata = {
+      ...descendantNode.getMetadata,
+      additionalParam: 'second',
+    }
+
+    const reference = await node.save(saveFunction)
+    const nodeAgain = new MantarayNode()
+    await nodeAgain.load(loadFunction, reference)
+    await loadAllNodes(loadFunction, nodeAgain)
+    const firstNodeAgain = nodeAgain.forks[105].node
+    const descendantNodeAgain = firstNodeAgain.forks[109].node.forks[46].node
+
+    expect(firstNodeAgain.getMetadata).toStrictEqual(firstNode.getMetadata)
+    expect(firstNodeAgain.getMetadata.additionalParam).toBe('first')
+    // fails if the save does not walk the whole tree
+    expect(descendantNodeAgain.getMetadata).toStrictEqual(descendantNode.getMetadata)
+    expect(descendantNodeAgain.getMetadata.additionalParam).toBe('second')
+  })
+})
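For orientation, the `saveFunction`/`loadFunction` adapters used throughout these specs can be reused outside Jest. The sketch below is illustrative only (it assumes a local Bee node, a valid `BEE_POSTAGE` batch ID in the environment, and the published `mantaray-js` entry point); it mirrors the same round trip the 0.2 integration spec performs, it is not an additional test in this change set.

```ts
import { Bee, Utils } from '@ethersphere/bee-js'
import { MantarayV0_2 } from 'mantaray-js'
import type { Reference } from 'mantaray-js'

const bee = new Bee(process.env.BEE_API_URL || 'http://localhost:1633')
const stamp = process.env.BEE_POSTAGE as string

// same adapter shape as in the specs above
const hexToBytes = (hexString: string): Reference => Utils.Hex.hexToBytes(hexString)
const saveFunction = async (data: Uint8Array): Promise<Reference> => hexToBytes(await bee.uploadData(stamp, data))
const loadFunction = async (address: Reference): Promise<Uint8Array> => bee.downloadData(Utils.Hex.bytesToHex(address))

async function roundTrip(): Promise<void> {
  // upload some content and reference it from a fresh manifest node
  const contentRef = hexToBytes(await bee.uploadData(stamp, new TextEncoder().encode('hello')))
  const node = new MantarayV0_2.MantarayNode()
  node.addFork(new TextEncoder().encode('hello.txt'), contentRef, { 'Content-Type': 'text/plain' })

  // persist the trie, then load it back into a new in-memory node
  const rootRef = await node.save(saveFunction)
  const nodeAgain = new MantarayV0_2.MantarayNode()
  await nodeAgain.load(loadFunction, rootRef)
  await MantarayV0_2.loadAllNodes(loadFunction, nodeAgain)
}

roundTrip().catch(console.error)
```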
diff --git a/test/integration/mantaray-1_0.spec.ts b/test/integration/mantaray-1_0.spec.ts
new file mode 100644
index 0000000..fce7047
--- /dev/null
+++ b/test/integration/mantaray-1_0.spec.ts
@@ -0,0 +1,36 @@
+import { Bee, Utils } from '@ethersphere/bee-js'
+import { MantarayV1 } from '../../src'
+import { loadAllNodes } from '../../src/mantaray-v1'
+import type { Reference } from '../../src/types'
+import { commonMatchers, getSampleMantarayNode1_0 } from '../utils'
+
+commonMatchers()
+const beeUrl = process.env.BEE_API_URL || 'http://localhost:1633'
+const bee = new Bee(beeUrl)
+
+const hexToBytes = (hexString: string): Reference => {
+  return Utils.Hex.hexToBytes(hexString)
+}
+
+const saveFunction = async (data: Uint8Array): Promise<Reference> => {
+  const hexRef = await bee.uploadData(process.env.BEE_POSTAGE, data)
+
+  return hexToBytes(hexRef)
+}
+
+const loadFunction = async (address: Reference): Promise<Uint8Array> => {
+  return bee.downloadData(Utils.Hex.bytesToHex(address))
+}
+
+describe('Mantaray 1.0 integration tests', () => {
+  it('should marshal the whole trie then compare all nodes with their originals', async () => {
+    const samples = getSampleMantarayNode1_0()
+    const rootNode = samples.node
+    const address = await rootNode.save(saveFunction)
+    const rootNodeAgain = new MantarayV1.MantarayNode()
+    await rootNodeAgain.load(loadFunction, address)
+    await loadAllNodes(loadFunction, rootNodeAgain)
+
+    expect(rootNodeAgain).toBeEqualNode1_0(rootNode)
+  })
+})
diff --git a/test/testpage/img/icon.png b/test/integration/testpage/img/icon.png
similarity index 100%
rename from test/testpage/img/icon.png
rename to test/integration/testpage/img/icon.png
diff --git a/test/testpage/img/icon.png.txt b/test/integration/testpage/img/icon.png.txt
similarity index 100%
rename from test/testpage/img/icon.png.txt
rename to test/integration/testpage/img/icon.png.txt
diff --git a/test/testpage/index.html b/test/integration/testpage/index.html
similarity index 100%
rename from test/testpage/index.html
rename to test/integration/testpage/index.html
diff --git a/test/test-setup.ts b/test/test-setup.ts
new file mode 100644
index 0000000..8ba6b04
--- /dev/null
+++ b/test/test-setup.ts
@@ -0,0 +1,30 @@
+/* eslint-disable no-console */
+import { BeeDebug, DebugPostageBatch } from '@ethersphere/bee-js'
+
+async function sleep(ms = 1000): Promise<void> {
+  return new Promise(resolve => setTimeout(resolve, ms))
+}
+
+export default async function testsSetup(): Promise<void> {
+  if (!process.env.BEE_POSTAGE) {
+    try {
+      console.log('Creating postage stamps...')
+      const beeDebugUrl = process.env.BEE_DEBUG_API_URL || 'http://localhost:1635'
+      const beeDebug = new BeeDebug(beeDebugUrl)
+      process.env.BEE_POSTAGE = await beeDebug.createPostageBatch('1', 20)
+      console.log('BEE_POSTAGE: ', process.env.BEE_POSTAGE)
+      // wait for the postage batch to become usable
+      let postageBatch: DebugPostageBatch
+      do {
+        postageBatch = await beeDebug.getPostageBatch(process.env.BEE_POSTAGE)
+
+        console.log('Waiting 1 sec for batch ID settlement...')
+        await sleep()
+      } while (!postageBatch.usable)
+    } catch (e) {
+      // It is possible that the Bee node is not running for unit tests,
+      // so errors are only logged here and not propagated
+      console.error(e)
+    }
+  }
+}
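The setup file above only takes effect if Jest is pointed at it. The wiring is not part of this diff; assuming the repository uses `globalSetup` with ts-jest (or an equivalent TypeScript transform), the configuration would look roughly like this:

```ts
// jest.config.ts – hypothetical wiring, the actual config is not shown in this diff
import type { Config } from '@jest/types'

const config: Config.InitialOptions = {
  // runs once before the whole test run: creates (or reuses) the BEE_POSTAGE batch
  globalSetup: '<rootDir>/test/test-setup.ts',
  testEnvironment: 'node',
}

export default config
```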
diff --git a/test/unit.spec.ts b/test/unit.spec.ts
deleted file mode 100644
index 076efe7..0000000
--- a/test/unit.spec.ts
+++ /dev/null
@@ -1,88 +0,0 @@
-import { initManifestNode, MantarayNode } from '../src'
-import { checkForSeparator } from '../src/node'
-import { gen32Bytes } from '../src/utils'
-import { getSampleMantarayNode } from './utils'
-
-it('should init a single mantaray node with a random address', () => {
-  const node = initManifestNode()
-  const randAddress = gen32Bytes()
-  node.setEntry = randAddress
-  const serialized = node.serialize()
-  const nodeAgain = new MantarayNode()
-  nodeAgain.deserialize(serialized)
-  expect(randAddress).toStrictEqual(nodeAgain.getEntry)
-})
-
-it('tests getForkAtPath method of node and checkForSeparator function', () => {
-  const sampleNode = getSampleMantarayNode()
-  const node = sampleNode.node
-  expect(() => node.getForkAtPath(new TextEncoder().encode('path/not/exists'))).toThrowError()
-
-  const fork1 = node.getForkAtPath(new TextEncoder().encode('path1/valami/')) // no separator in the descendants
-  expect(checkForSeparator(fork1.node)).toBeFalsy()
-
-  const path2 = sampleNode.paths[3] // separator in the descendants
-  const fork2 = node.getForkAtPath(path2)
-  expect(checkForSeparator(fork2.node)).toBeTruthy()
-
-  const path3 = sampleNode.paths[4] // no separator in the descendants, no forks
-  const fork3 = node.getForkAtPath(path3)
-  expect(checkForSeparator(fork3.node)).toBeFalsy()
-})
-
-it('should throw exception on serialize if there were no storage saves before', () => {
-  const node = initManifestNode()
-  const randAddress = gen32Bytes()
-  const path = new TextEncoder().encode('vmi')
-  node.addFork(path, randAddress)
-  expect(() => node.serialize()).toThrowError()
-})
-
-it('checks the expected structure of the sample mantaray node', () => {
-  const sampleNode = getSampleMantarayNode()
-  const node = sampleNode.node
-  const path1 = sampleNode.paths[0]
-  const path2 = sampleNode.paths[1]
-  const path3 = sampleNode.paths[2]
-  const path5 = sampleNode.paths[4]
-
-  expect(Object.keys(node.forks)).toStrictEqual([String(path1[0])]) // first level: 'p'
-  const secondLevelFork = node.forks![path5[0]]
-  expect(secondLevelFork.prefix).toStrictEqual(new TextEncoder().encode('path'))
-  const secondLevelNode = secondLevelFork.node
-  expect(Object.keys(secondLevelNode.forks)).toStrictEqual([String(path1[4]), String(path5[4])]) // second level: '1', '2'
-  const thirdLevelFork2 = secondLevelNode.forks[path5[4]]
-  expect(thirdLevelFork2.prefix).toStrictEqual(new Uint8Array([path5[4]]))
-  const thirdLevelFork1 = secondLevelNode.forks[path1[4]]
-  expect(thirdLevelFork1.prefix).toStrictEqual(new TextEncoder().encode('1/valami'))
-  const thirdLevelNode1 = thirdLevelFork1.node
-  expect(Object.keys(thirdLevelNode1.forks)).toStrictEqual([String(path1[12])]) // third level 1: '/'
-  const forthLevelFork1 = thirdLevelNode1.forks![path1[12]]
-  expect(forthLevelFork1.prefix).toStrictEqual(new Uint8Array([path1[12]]))
-  const fourthLevelNode1 = forthLevelFork1.node
-  expect(Object.keys(fourthLevelNode1.forks)).toStrictEqual([String(path1[13]), String(path2[13])]) // fourth level 1: 'e', 'm'
-  const fifthLevelFork2 = fourthLevelNode1.forks![path2[13]]
-  expect(fifthLevelFork2.prefix).toStrictEqual(new TextEncoder().encode('masodik'))
-  const fifthLevelNode2 = fifthLevelFork2.node
-  expect(Object.keys(fifthLevelNode2.forks)).toStrictEqual([String(path3[20])]) // fifth level 2: '.'
-  const sixthLevelNode1 = fifthLevelNode2.forks[path3[20]]
-  expect(sixthLevelNode1.prefix).toStrictEqual(new TextEncoder().encode('.ext'))
-})
-
-it('should remove forks', () => {
-  const sampleNode = getSampleMantarayNode()
-  const node = sampleNode.node
-  // save sample node
-  const path1 = sampleNode.paths[0]
-  const path2 = sampleNode.paths[1]
-
-  // non existing path check
-  expect(() => node.removePath(new Uint8Array([0, 1, 2]))).toThrowError()
-  // node where the fork set will change
-  const checkNode1 = node.getForkAtPath(new TextEncoder().encode('path1/valami/')).node
-  // current forks of node
-  expect(Object.keys(checkNode1.forks)).toStrictEqual([String(path1[13]), String(path2[13])])
-  node.removePath(path2)
-  // 'm' key of prefix table disappeared
-  expect(Object.keys(checkNode1.forks)).toStrictEqual([String(path1[13])])
-})
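The deleted top-level spec is split into per-version suites below. At construction time the only difference is the version selector passed to `initManifestNode`, while `addFork` keeps the legacy positional signature on 0.2 and the options-object signature on 1.0; a minimal sketch of the two styles (using a placeholder all-zero reference purely for illustration):

```ts
import { initManifestNode } from 'mantaray-js'
import type { Reference } from 'mantaray-js'

const someReference = new Uint8Array(32) as Reference // placeholder 32-byte reference

// 0.2 nodes keep the original (path, entry, metadata) parameters...
const legacy = initManifestNode({ version: '0.2' })
legacy.addFork(new TextEncoder().encode('path1'), someReference, { vmi: 'elso' })

// ...while 1.0 nodes take an options object and split node-level and fork-level metadata
const recent = initManifestNode({ version: '1.0' }) // 1.0 is also the default when no version is given
recent.addFork(new TextEncoder().encode('path1'), { entry: someReference, forkMetadata: { vmi: 'elso' } })
```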
diff --git a/test/unit/mantaray-0_2.spec.ts b/test/unit/mantaray-0_2.spec.ts
new file mode 100644
index 0000000..df1ab61
--- /dev/null
+++ b/test/unit/mantaray-0_2.spec.ts
@@ -0,0 +1,92 @@
+import { initManifestNode, MantarayV0_2 } from '../../src'
+import { checkForSeparator } from '../../src/mantaray-v0_2'
+import { gen32Bytes } from '../utils'
+import { getSampleMantarayNode0_2 } from '../utils'
+
+const { MantarayNode } = MantarayV0_2
+
+describe('Mantaray 0.2 Unit Tests', () => {
+  it('should init a single mantaray node with a random address', () => {
+    const node = initManifestNode({ version: '0.2' })
+    const randAddress = gen32Bytes()
+    node.setEntry = randAddress
+    const serialized = node.serialize()
+    const nodeAgain = new MantarayNode()
+    nodeAgain.deserialize(serialized)
+    expect(randAddress).toStrictEqual(nodeAgain.getEntry)
+  })
+
+  it('tests getForkAtPath method of node and checkForSeparator function', () => {
+    const sampleNode = getSampleMantarayNode0_2()
+    const node = sampleNode.node
+    expect(() => node.getForkAtPath(new TextEncoder().encode('path/not/exists'))).toThrowError()
+
+    const fork1 = node.getForkAtPath(new TextEncoder().encode('path1/valami/')) // no separator in the descendants
+    expect(checkForSeparator(fork1.node)).toBeFalsy()
+
+    const path2 = sampleNode.paths[3] // separator in the descendants
+    const fork2 = node.getForkAtPath(path2)
+    expect(checkForSeparator(fork2.node)).toBeTruthy()
+
+    const path3 = sampleNode.paths[4] // no separator in the descendants, no forks
+    const fork3 = node.getForkAtPath(path3)
+    expect(checkForSeparator(fork3.node)).toBeFalsy()
+  })
+
+  it('should throw exception on serialize if there were no storage saves before', () => {
+    const node = initManifestNode({ version: '0.2' })
+    const randAddress = gen32Bytes()
+    const path = new TextEncoder().encode('vmi')
+    node.addFork(path, randAddress)
+    expect(() => node.serialize()).toThrowError()
+  })
+
+  it('checks the expected structure of the sample mantaray node', () => {
+    const sampleNode = getSampleMantarayNode0_2()
+    const node = sampleNode.node
+    const path1 = sampleNode.paths[0]
+    const path2 = sampleNode.paths[1]
+    const path3 = sampleNode.paths[2]
+    const path5 = sampleNode.paths[4]
+
+    expect(Object.keys(node.forks)).toStrictEqual([String(path1[0])]) // first level: 'p'
+    const secondLevelFork = node.forks![path5[0]]
+    expect(secondLevelFork.prefix).toStrictEqual(new TextEncoder().encode('path'))
+    const secondLevelNode = secondLevelFork.node
+    expect(Object.keys(secondLevelNode.forks)).toStrictEqual([String(path1[4]), String(path5[4])]) // second level: '1', '2'
+    const thirdLevelFork2 = secondLevelNode.forks[path5[4]]
+    expect(thirdLevelFork2.prefix).toStrictEqual(new Uint8Array([path5[4]]))
+    const thirdLevelFork1 = secondLevelNode.forks[path1[4]]
+    expect(thirdLevelFork1.prefix).toStrictEqual(new TextEncoder().encode('1/valami'))
+    const thirdLevelNode1 = thirdLevelFork1.node
+    expect(Object.keys(thirdLevelNode1.forks)).toStrictEqual([String(path1[12])]) // third level 1: '/'
+    const forthLevelFork1 = thirdLevelNode1.forks![path1[12]]
+    expect(forthLevelFork1.prefix).toStrictEqual(new Uint8Array([path1[12]]))
+    const fourthLevelNode1 = forthLevelFork1.node
+    expect(Object.keys(fourthLevelNode1.forks)).toStrictEqual([String(path1[13]), String(path2[13])]) // fourth level 1: 'e', 'm'
+    const fifthLevelFork2 = fourthLevelNode1.forks![path2[13]]
+    expect(fifthLevelFork2.prefix).toStrictEqual(new TextEncoder().encode('masodik'))
+    const fifthLevelNode2 = fifthLevelFork2.node
+    expect(Object.keys(fifthLevelNode2.forks)).toStrictEqual([String(path3[20])]) // fifth level 2: '.'
+    const sixthLevelNode1 = fifthLevelNode2.forks[path3[20]]
+    expect(sixthLevelNode1.prefix).toStrictEqual(new TextEncoder().encode('.ext'))
+  })
+
+  it('should remove forks', () => {
+    const sampleNode = getSampleMantarayNode0_2()
+    const node = sampleNode.node
+    // save sample node
+    const path1 = sampleNode.paths[0]
+    const path2 = sampleNode.paths[1]
+
+    // non existing path check
+    expect(() => node.removePath(new Uint8Array([0, 1, 2]))).toThrowError()
+    // node where the fork set will change
+    const checkNode1 = node.getForkAtPath(new TextEncoder().encode('path1/valami/')).node
+    // current forks of node
+    expect(Object.keys(checkNode1.forks)).toStrictEqual([String(path1[13]), String(path2[13])])
+    node.removePath(path2)
+    // 'm' key of prefix table disappeared
+    expect(Object.keys(checkNode1.forks)).toStrictEqual([String(path1[13])])
+  })
+})
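The 1.0 unit tests that follow exercise "continuous" nodes: when a path segment is longer than a single fork prefix can hold, the remainder is carried by chained nodes flagged as continuous. The sketch below only models the chunking the assertions expect; the prefix capacity of 31 bytes is inferred from the expected prefixes in the test, and the real splitting lives inside the library's `addFork`.

```ts
// Illustrative only: split a long path remainder into prefix-sized chunks,
// the way the expectations in the next spec describe it.
const PREFIX_CAPACITY = 31 // assumed from the expected 31-character prefixes below

function splitIntoPrefixes(path: Uint8Array): Uint8Array[] {
  const chunks: Uint8Array[] = []
  for (let i = 0; i < path.length; i += PREFIX_CAPACITY) {
    chunks.push(path.slice(i, i + PREFIX_CAPACITY))
  }

  return chunks
}

const remainder = new TextEncoder().encode('3/reallylongpathtotestcontinuousnodeandasyouseeiamstillwritingthis')
// -> [ '3/reallylongpathtotestcontinuou', 'snodeandasyouseeiamstillwriting', 'this' ]
console.log(splitIntoPrefixes(remainder).map(chunk => new TextDecoder().decode(chunk)))
```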
diff --git a/test/unit/mantaray-1_0.spec.ts b/test/unit/mantaray-1_0.spec.ts
new file mode 100644
index 0000000..67a8581
--- /dev/null
+++ b/test/unit/mantaray-1_0.spec.ts
@@ -0,0 +1,213 @@
+import { initManifestNode, MantarayV1 } from '../../src'
+import { gen32Bytes } from '../utils'
+import { getSampleMantarayNode1_0 } from '../utils'
+
+const { MantarayNode } = MantarayV1
+
+/** Used for checking correct serialisation of the node */
+function serialiseDeserialise(node: MantarayV1.MantarayNode): MantarayV1.MantarayNode {
+  const serialized = node.serialize()
+  const nodeAgain = new MantarayNode()
+  nodeAgain.deserialize(serialized)
+
+  return nodeAgain
+}
+
+describe('Mantaray 1.0 Unit Tests', () => {
+  it('should init a single mantaray node with a random address', () => {
+    const node = initManifestNode({ version: '1.0' })
+    const randAddress = gen32Bytes()
+    node.entry = randAddress
+    const obfuscationKey = gen32Bytes()
+    node.obfuscationKey = obfuscationKey
+    const nodeAgain = serialiseDeserialise(node)
+    expect(randAddress).toStrictEqual(nodeAgain.entry)
+    expect(obfuscationKey).toStrictEqual(nodeAgain.obfuscationKey)
+  })
+
+  it('should throw exception on serialize if there were no storage saves before', () => {
+    const node = initManifestNode()
+    const randAddress = gen32Bytes()
+    const path = new TextEncoder().encode('vmi')
+    node.addFork(path, { entry: randAddress })
+    expect(() => node.serialize()).toThrowError()
+  })
+
+  it('should throw error if no random key generator function was passed when the obfuscation key is defined', () => {
+    const node = initManifestNode({ version: '1.0' })
+    node.obfuscationKey = gen32Bytes()
+    const path = new TextEncoder().encode('vmi')
+    expect(() => node.addFork(path)).toThrowError(/^Obfuscation key generator is not passed/)
+  })
+
+  it('tests expected node attributes one-by-one', () => {
+    const sampleNode = getSampleMantarayNode1_0()
+    const node = sampleNode.node
+    const { fork1, fork2, fork3, fork4, fork5 } = sampleNode.forks
+    expect(() => node.getForkAtPath(new TextEncoder().encode('path/not/exists'))).toThrowError()
+
+    /// FORK1
+    const fork1Object = node.getForkAtPath(fork1.path)
+    //entry
+    expect(fork1Object.node.entry).toBe(fork1.entry)
+    expect(fork1Object.node.encEntry).toBe(true)
+    expect(fork1Object.node.hasEntry).toBe(Boolean(fork1.entry))
+    expect(fork1Object.node.isValueType()).toBe(Boolean(fork1.entry))
+    //forkmetadata and nodemetadata
+    expect(fork1Object.node.forkMetadata).toStrictEqual(fork1.forkMetadata)
+    expect(fork1Object.node.metadata).toStrictEqual(fork1.forkMetadata)
+    expect(fork1Object.node.nodeMetadata).toStrictEqual(fork1.nodeMetadata)
+    expect(fork1Object.node.isWithMetadataType()).toBe(true)
+    //other attributes
+    expect(fork1Object.node.isEdge).toBe(false)
+    expect(fork1Object.node.isContinuousNode).toBe(false)
+    expect(fork1Object.node.isDirty()).toBe(true)
+
+    //FORK2
+    const fork2Object = node.getForkAtPath(fork2.path)
+    //entry
+    expect(fork2Object.node.entry).toBe(fork2.entry)
+    expect(fork2Object.node.encEntry).toBe(false)
+    expect(fork2Object.node.hasEntry).toBe(Boolean(fork2.entry))
+    expect(fork2Object.node.isValueType()).toBe(Boolean(fork2.entry))
+    //forkmetadata and nodemetadata
+    expect(fork2Object.node.forkMetadata).toStrictEqual(fork2.forkMetadata)
+    expect(fork2Object.node.metadata).toStrictEqual(fork2.forkMetadata)
+    expect(fork2Object.node.nodeMetadata).toStrictEqual(fork2.nodeMetadata)
+    expect(fork2Object.node.isWithMetadataType()).toBe(false)
+    //other attributes
+    expect(fork2Object.node.isEdge).toBe(true)
+    expect(fork2Object.node.isContinuousNode).toBe(false)
+    expect(fork2Object.node.isDirty()).toBe(true)
+
+    //FORK3
+    const fork3Object = node.getForkAtPath(fork3.path)
+    //entry
+    expect(fork3Object.node.entry).toBe(fork3.entry)
+    expect(fork3Object.node.encEntry).toBe(false)
+    expect(fork3Object.node.hasEntry).toBe(Boolean(fork3.entry))
+    expect(fork3Object.node.isValueType()).toBe(Boolean(fork3.entry))
+    //forkmetadata and nodemetadata
+    expect(fork3Object.node.forkMetadata).toStrictEqual(fork3.forkMetadata)
+    expect(fork3Object.node.metadata).toStrictEqual(fork3.nodeMetadata)
+    expect(fork3Object.node.nodeMetadata).toStrictEqual(fork3.nodeMetadata)
+    expect(fork3Object.node.isWithMetadataType()).toBe(true)
+    //other attributes
+    expect(fork3Object.node.isEdge).toBe(false)
+    expect(fork3Object.node.isContinuousNode).toBe(false)
+    expect(fork3Object.node.isDirty()).toBe(true)
+
+    //FORK4
+    const fork4Object = node.getForkAtPath(fork4.path)
+    //entry
+    expect(fork4Object.node.entry).toBe(fork4.entry)
+    expect(fork4Object.node.encEntry).toBe(false)
+    expect(fork4Object.node.hasEntry).toBe(Boolean(fork4.entry))
+    expect(fork4Object.node.isValueType()).toBe(Boolean(fork4.entry))
+    //forkmetadata and nodemetadata
+    expect(fork4Object.node.forkMetadata).toStrictEqual(fork4.forkMetadata)
+    expect(fork4Object.node.metadata).toStrictEqual({ ...fork4.nodeMetadata, ...fork4.forkMetadata }) // has to overwrite nodeMetadata
+    expect(fork4Object.node.nodeMetadata).toStrictEqual(fork4.nodeMetadata)
+    expect(fork4Object.node.isWithMetadataType()).toBe(true)
+    //other attributes
+    expect(fork4Object.node.isEdge).toBe(true)
+    expect(fork4Object.node.isContinuousNode).toBe(false)
+    expect(fork4Object.node.isDirty()).toBe(true)
+
+    //FORK5
+    const fork5Object = node.getForkAtPath(fork5.path)
+    //entry
+    expect(fork5Object.node.entry).toBe(fork5.entry)
+    expect(fork5Object.node.encEntry).toBe(false)
+    expect(fork5Object.node.hasEntry).toBe(Boolean(fork5.entry))
+    expect(fork5Object.node.isValueType()).toBe(Boolean(fork5.entry))
+    //forkmetadata and nodemetadata
+    expect(fork5Object.node.forkMetadata).toStrictEqual(fork5.forkMetadata)
+    expect(fork5Object.node.metadata).toStrictEqual(fork5.forkMetadata)
+    expect(fork5Object.node.nodeMetadata).toStrictEqual(fork5.nodeMetadata)
+    expect(fork5Object.node.isWithMetadataType()).toBe(false)
+    //other attributes
+    expect(fork5Object.node.isEdge).toBe(false)
+    expect(fork5Object.node.isContinuousNode).toBe(false)
+    expect(fork5Object.node.isDirty()).toBe(true)
+  })
+
+  it('checks the expected structure of the sample mantaray node', () => {
+    const sampleNode = getSampleMantarayNode1_0()
+    const node = sampleNode.node
+    const { fork1, fork2, fork3, fork5, fork6 } = sampleNode.forks
+    const path1 = fork1.path
+    const path2 = fork2.path
+    const path3 = fork3.path
+    const path5 = fork5.path
+    const path6 = fork6.path
+
+    expect(Object.keys(node.forks)).toStrictEqual([String(path1[0])]) // first level: 'p'
+    const secondLevelFork = node.forks![path5[0]]
+    expect(secondLevelFork.prefix).toStrictEqual(new TextEncoder().encode('path'))
+    const secondLevelNode = secondLevelFork.node
+    expect(Object.keys(secondLevelNode.forks)).toStrictEqual([String(path1[4]), String(path5[4]), String(path6[4])]) // second level: '1', '2', '3'
+    const thirdLevelFork2 = secondLevelNode.forks[path5[4]]
+    expect(thirdLevelFork2.prefix).toStrictEqual(new Uint8Array([path5[4]]))
+    const thirdLevelFork1 = secondLevelNode.forks[path1[4]]
+    expect(thirdLevelFork1.prefix).toStrictEqual(new TextEncoder().encode('1/valami'))
+    const thirdLevelNode1 = thirdLevelFork1.node
+    expect(Object.keys(thirdLevelNode1.forks)).toStrictEqual([String(path1[12])]) // third level 1: '/'
+    const thirdLevelFork3 = secondLevelNode.forks[path6[4]] // 'path3'
+    expect(thirdLevelFork3.prefix).toStrictEqual(new TextEncoder().encode('3/reallylongpathtotestcontinuou'))
+    const thirdLevelNode3 = thirdLevelFork3.node
+    expect(thirdLevelNode3.isContinuousNode).toBe(true)
+    expect(Object.keys(thirdLevelNode3.forks)).toStrictEqual([String(115)]) //'s' ASCII
+    const forthLevelFork1 = thirdLevelNode1.forks![path1[12]]
+    expect(forthLevelFork1.prefix).toStrictEqual(new Uint8Array([path1[12]]))
+    const fourthLevelNode1 = forthLevelFork1.node
+    expect(Object.keys(fourthLevelNode1.forks)).toStrictEqual([String(path1[13]), String(path2[13])]) // fourth level 1: 'e', 'm'
+    const fourthLevelFork2 = thirdLevelNode3.forks![115] // 's' ASCII
+    expect(fourthLevelFork2.prefix).toStrictEqual(new TextEncoder().encode('snodeandasyouseeiamstillwriting'))
+    const fourthLevelNode2 = fourthLevelFork2.node
+    expect(fourthLevelNode2.isContinuousNode).toBe(true)
+    expect(Object.keys(fourthLevelNode2.forks)).toStrictEqual([String(116)]) // 't' ASCII
+    const fifthLevelFork2 = fourthLevelNode1.forks![path2[13]]
+    expect(fifthLevelFork2.prefix).toStrictEqual(new TextEncoder().encode('masodik'))
+    const fifthLevelNode2 = fifthLevelFork2.node
+    const fifthLevelFork3 = fourthLevelNode2.forks![116] // 't' ASCII
+    expect(fifthLevelFork3.prefix).toStrictEqual(new TextEncoder().encode('this'))
+    expect(fifthLevelFork3.node.isContinuousNode).toBe(false)
+    expect(Object.keys(fifthLevelNode2.forks)).toStrictEqual([String(path3[20])]) // fifth level 2: '.'
+    const sixthLevelNode1 = fifthLevelNode2.forks[path3[20]]
+    expect(sixthLevelNode1.prefix).toStrictEqual(new TextEncoder().encode('.ext'))
+  })
+
+  it('should handle the continuous node on addFork properly', () => {
+    const sampleNode = getSampleMantarayNode1_0()
+    const node = sampleNode.node
+    const firstContinuousFork = node.getForkAtPath(new TextEncoder().encode('path3/reallylongpathtotestcontinuou'))
+    expect(firstContinuousFork.node.isContinuousNode).toBe(true)
+    expect(firstContinuousFork.prefix).toStrictEqual(new TextEncoder().encode('3/reallylongpathtotestcontinuou'))
+    node.addFork(new TextEncoder().encode('path3/reallylongpathtotestfork'), {
+      nodeMetadata: { test: 'firstForkingOnContinuousNode' },
+    })
+    const firstContinuousForkAgain = node.getForkAtPath(new TextEncoder().encode('path3/reallylongpathtotest'))
+    expect(firstContinuousForkAgain.node.isContinuousNode).toBe(false)
+    expect(firstContinuousForkAgain.prefix).toStrictEqual(new TextEncoder().encode('3/reallylongpathtotest'))
+  })
+
+  it('should remove forks', () => {
+    const sampleNode = getSampleMantarayNode1_0()
+    const node = sampleNode.node
+    const { fork1, fork2 } = sampleNode.forks
+    // save sample node
+    const path1 = fork1.path
+    const path2 = fork2.path
+
+    // non existing path check
+    expect(() => node.removePath(new Uint8Array([0, 1, 2]))).toThrowError()
+    // node where the fork set will change
+    const checkNode1 = node.getForkAtPath(new TextEncoder().encode('path1/valami/')).node
+    // current forks of node
+    expect(Object.keys(checkNode1.forks)).toStrictEqual([String(path1[13]), String(path2[13])])
+    node.removePath(path2)
+    // 'm' key of prefix table disappeared
+    expect(Object.keys(checkNode1.forks)).toStrictEqual([String(path1[13])])
+  })
+})
diff --git a/test/unit/utils.spec.ts b/test/unit/utils.spec.ts
new file mode 100644
index 0000000..24e63df
--- /dev/null
+++ b/test/unit/utils.spec.ts
@@ -0,0 +1,14 @@
+import { serializeMetadataInSegment } from '../../src/utils'
+
+describe('utils', () => {
+  it('serializeMetadataInSegment', () => {
+    const testMetadata = { valami: 'test' }
+    const metadataSegment = serializeMetadataInSegment(testMetadata, 1)
+    expect(metadataSegment.length).toBe(32)
+    const metadataSegment2 = serializeMetadataInSegment(undefined, 10)
+    expect(metadataSegment2.length).toBe(320)
+    expect(() => serializeMetadataInSegment(testMetadata, 0)).toThrowError(
+      /^serialized metadata does not fit into the reserved/,
+    )
+  })
+})
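The spec above pins down the observable contract of `serializeMetadataInSegment`: metadata is serialised and padded to whole 32-byte segments, and an error is thrown when it does not fit into the reserved segment count. The sketch below is only a rough model of that contract for readers, not the library's implementation.

```ts
// Rough model of what the utils spec asserts (illustrative, not src/utils.ts itself):
const SEGMENT_SIZE = 32

function serializeMetadataInSegmentSketch(metadata: Record<string, string> | undefined, segmentCount: number): Uint8Array {
  const target = new Uint8Array(segmentCount * SEGMENT_SIZE)

  if (!metadata) return target // e.g. (undefined, 10) -> 320 zero bytes

  const json = new TextEncoder().encode(JSON.stringify(metadata))

  if (json.length > target.length) {
    throw new Error('serialized metadata does not fit into the reserved segments')
  }
  target.set(json) // JSON bytes padded with zeros up to the segment boundary

  return target
}
```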
diff --git a/test/utils.ts b/test/utils.ts
index c7932c3..bf984ec 100644
--- a/test/utils.ts
+++ b/test/utils.ts
@@ -1,28 +1,84 @@
-import { equalNodes, MantarayNode } from '../src/node'
-import { gen32Bytes } from '../src/utils'
+import { Utils } from '@ethersphere/bee-js'
+import { MantarayNode, MantarayV0_2, MantarayV1, Reference, MetadataMapping, Bytes } from '../src'
+import { MantarayFork } from '../src/mantaray-v1'
+import { equalBytes } from '../src/utils'
+
+const { hexToBytes } = Utils.Hex
 
 declare global {
   // eslint-disable-next-line @typescript-eslint/no-namespace
   namespace jest {
     interface Matchers<R> {
-      toBeEqualNode(compareTo: MantarayNode): R
+      toBeEqualNode0_2(compareTo: MantarayV0_2.MantarayNode): R
+      toBeEqualNode1_0(compareTo: MantarayV1.MantarayNode): R
     }
   }
 }
 
+class NodesNotSame extends Error {
+  constructor(error: string, path: string) {
+    super(`"Error: ${error} \n\ton path: ${path}`)
+  }
+}
+
+/**
+ * Lehmer random number generator with seed (minstd_rand in C++11)
+ * !!! Very fast but not well distributed pseudo-random function !!!
+ *
+ * @param seed Seed for the pseudo-random generator
+ */
+function lrng(seed: number): () => number {
+  return (): number => ((2 ** 31 - 1) & (seed = Math.imul(48271, seed))) / 2 ** 31
+}
+
+/**
+ * Utility function for generating a pseudo-random byte array
+ * !!! IT IS NOT CRYPTO SAFE !!!
+ * For that use `crypto.randomBytes()`
+ *
+ * @param length Number of bytes to generate
+ * @param seed Seed for the pseudo-random generator
+ */
+export function gen32Bytes(seed?: number): Bytes<32> {
+  if (!seed) seed = new Date().getTime()
+  const rand = lrng(seed)
+  const buf = new Uint8Array(32) as Bytes<32>
+
+  for (let i = 0; i < 32; ++i) {
+    buf[i] = (rand() * 0xff) << 0
+  }
+
+  return buf
+}
+
 /**
  * Load common own Jest Matchers which can be used to check particular return values.
  */
 export function commonMatchers(): void {
   expect.extend({
-    toBeEqualNode(received: MantarayNode, compareTo: MantarayNode) {
+    toBeEqualNode0_2(received: MantarayNode<'0.2'>, compareTo: MantarayNode<'0.2'>) {
       const result = {
         pass: true,
         message: () => 'Given Manatary nodes are equal',
       }
 
       try {
-        equalNodes(received, compareTo)
+        MantarayV0_2.equalNodes(received, compareTo)
+      } catch (e) {
+        result.pass = false
+        result.message = () => e.message
+      }
+
+      return result
+    },
+    toBeEqualNode1_0(received: MantarayNode<'1.0'>, compareTo: MantarayNode<'1.0'>) {
+      const result = {
+        pass: true,
+        message: () => 'Given Mantaray nodes are equal',
+      }
+
+      try {
+        equalNodes1_0(received, compareTo)
       } catch (e) {
         result.pass = false
         result.message = () => e.message
@@ -33,8 +89,8 @@ export function commonMatchers(): void {
   })
 }
 
-export function getSampleMantarayNode(): { node: MantarayNode; paths: Uint8Array[] } {
-  const node = new MantarayNode()
+export function getSampleMantarayNode0_2(): { node: MantarayNode<'0.2'>; paths: Uint8Array[] } {
+  const node: MantarayNode<'0.2'> = new MantarayV0_2.MantarayNode()
   const randAddress = gen32Bytes()
   node.setEntry = randAddress
   const path1 = new TextEncoder().encode('path1/valami/elso')
@@ -53,3 +109,170 @@ export function getSampleMantarayNode(): { node: MantarayNode; paths: Uint8Array
     paths: [path1, path2, path3, path4, path5],
   }
 }
+
+type SampleFork1_0 = {
+  path: Uint8Array
+  entry?: Reference
+  nodeMetadata?: MetadataMapping
+  forkMetadata?: MetadataMapping
+}
+
+type SampleForks1_0 = {
+  /** encrypted entry with forkMetadata */
+  fork1: SampleFork1_0
+  /** edge node with reference */
+  fork2: SampleFork1_0
+  /** leaf node with nodeMetadata without reference */
+  fork3: SampleFork1_0
+  /** edge node without reference with node and forkMetadata */
+  fork4: SampleFork1_0
+  /** standalone/empty leaf node */
+  fork5: SampleFork1_0
+  /** really long path to create continuous node */
+  fork6: SampleFork1_0
+}
+
+export function getSampleMantarayNode1_0(): { node: MantarayNode<'1.0'>; forks: SampleForks1_0 } {
+  const node: MantarayNode<'1.0'> = new MantarayV1.MantarayNode()
+  const forks: SampleForks1_0 = {
+    fork1: {
+      path: new TextEncoder().encode('path1/valami/elso'),
+      entry: hexToBytes<32>(
+        '7d4ccc856f51d0477fde68f9f06bca97c6cd3b4a86b3369ea6489ceaf7b315577d4ccc856f51d0477fde68f9f06bca97c6cd3b4a86b3369ea6489ceaf7b31557',
+      ),
+      forkMetadata: { vmi: 'elso' },
+    },
+    fork2: {
+      path: new TextEncoder().encode('path1/valami/masodik'),
+      entry: hexToBytes<32>('4a07606f59562544dd37d26a219a65144e8cf3321b21276d8ea8de4af3ecee63'),
+    },
+    fork3: {
+      path: new TextEncoder().encode('path1/valami/masodik.ext'),
+      nodeMetadata: { vmi2: 'harmadik' },
+    },
+    fork4: {
+      path: new TextEncoder().encode('path1/valami'),
+      forkMetadata: { vmi: 'negy' },
+      nodeMetadata: { vmi: 'negy!', vmi2: 123 },
+    },
+    fork5: {
+      path: new TextEncoder().encode('path2'),
+    },
+    fork6: {
+      path: new TextEncoder().encode('path3/reallylongpathtotestcontinuousnodeandasyouseeiamstillwritingthis'),
+      entry: hexToBytes<32>(
+        '7c4ccc856f51d0477fde68f9f06bca97c6cd3b4a86b3369ea6489ceaf7b315577d4ccc856f51d0477fde68f9f06bca97c6cd3b4a86b3369ea6489ceaf7b31557',
+      ),
+    },
+  }
+  for (const fork of Object.values(forks)) {
+    node.addFork(fork.path, {
+      entry: fork.entry,
+      nodeMetadata: fork.nodeMetadata,
+      forkMetadata: fork.forkMetadata,
+    })
+  }
+
+  return {
+    node,
+    forks,
+  }
+}
+
+/**
+ * Throws an error if the given nodes properties are not equal
+ *
+ * @param a Mantaray node to compare
+ * @param b Mantaray node to compare
+ * @param accumulatedPrefix accumulates the prefix during the recursion
+ * @throws Error if the two nodes properties are not equal recursively
+ */
+// eslint-disable-next-line complexity
+export const equalNodes1_0 = (a: MantarayNode<'1.0'>, b: MantarayNode<'1.0'>, accumulatedPrefix = ''): void | never => {
+  // node flags comparison
+  if (a.isContinuousNode !== b.isContinuousNode) {
+    throw new NodesNotSame(
+      `Nodes do not have same isContinuousNode flags. a: ${a.isContinuousNode} ; b: ${b.isContinuousNode}`,
+      accumulatedPrefix,
+    )
+  }
+
+  if (a.hasEntry !== b.hasEntry) {
+    throw new NodesNotSame(
+      `Nodes do not have same hasEntry flags. a: ${a.hasEntry} ; b: ${b.hasEntry}`,
+      accumulatedPrefix,
+    )
+  }
+
+  if (Boolean(a.encEntry) !== Boolean(b.encEntry)) {
+    throw new NodesNotSame(
+      `Nodes do not have same encEntry flags. a: ${a.encEntry} ; b: ${b.encEntry}\n\tAccumulated prefix: ${accumulatedPrefix}`,
+      accumulatedPrefix,
+    )
+  }
+
+  if (a.isEdge !== b.isEdge) {
+    throw new NodesNotSame(`Nodes do not have same isEdge flags. a: ${a.isEdge} ; b: ${b.isEdge}`, accumulatedPrefix)
+  }
+
+  if (a.forkMetadataSegmentSize !== b.forkMetadataSegmentSize) {
+    throw new NodesNotSame(
+      `Nodes do not have same forkMetadataSegmentSize. a: ${a.forkMetadataSegmentSize} ; b: ${b.forkMetadataSegmentSize}`,
+      accumulatedPrefix,
+    )
+  }
+
+  // node metadata comparison
+  if (!a.nodeMetadata !== !b.nodeMetadata) {
+    throw new NodesNotSame(
+      `One of the nodes does not have nodeMetadata defined. a: ${a.nodeMetadata} b: ${b.nodeMetadata}`,
+      accumulatedPrefix,
+    )
+  }
+
+  if (a.nodeMetadata && b.nodeMetadata) {
+    expect(a.nodeMetadata).toStrictEqual(b.nodeMetadata)
+  }
+
+  // fork metadata comparison
+  if (!a.forkMetadata !== !b.forkMetadata) {
+    throw new NodesNotSame(
+      `One of the nodes does not have forkMetadata defined. a: ${a.forkMetadata} b: ${b.forkMetadata}`,
+      accumulatedPrefix,
+    )
+  }
+
+  if (a.forkMetadata && b.forkMetadata) {
+    expect(a.forkMetadata).toStrictEqual(b.forkMetadata)
+  }
+
+  // node entry comparison
+  if (!equalBytes(a.entry || new Uint8Array(0), b.entry || new Uint8Array(0))) {
+    throw new NodesNotSame(`Nodes do not have same entries. a: ${a.entry} ; b: ${b.entry}`, accumulatedPrefix)
+  }
+
+  if (!a.forks) return
+
+  // node fork comparison
+  const aKeys = Object.keys(a.forks)
+
+  if (!b.forks || aKeys.length !== Object.keys(b.forks).length) {
+    throw new NodesNotSame(
+      `Nodes do not have same fork length on equality check at prefix ${accumulatedPrefix}`,
+      accumulatedPrefix,
+    )
+  }
+
+  for (const key of aKeys) {
+    const aFork: MantarayFork = a.forks[Number(key)]
+    const bFork: MantarayFork = b.forks[Number(key)]
+    const prefix = aFork.prefix
+    const prefixString = new TextDecoder().decode(prefix)
+
+    if (!equalBytes(prefix, bFork.prefix)) {
+      throw new NodesNotSame(`Nodes do not have same prefix under the same key "${key}"`, accumulatedPrefix)
+    }
+
+    equalNodes1_0(aFork.node, bFork.node, accumulatedPrefix + prefixString)
+  }
+}
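Two properties of the helpers in test/utils.ts are easy to miss when reading the diff: `gen32Bytes` is seeded (so the same seed always yields the same 32 bytes, which makes fixtures reproducible), and `equalNodes1_0` signals inequality by throwing `NodesNotSame` rather than returning false, which is exactly what `toBeEqualNode1_0` converts into a failed expectation. A hedged sketch of how they compose inside any spec file sitting next to test/utils.ts:

```ts
import { gen32Bytes, getSampleMantarayNode1_0, equalNodes1_0 } from './utils'

it('sketch: deterministic fixtures and direct node comparison', () => {
  // lrng is seeded, so the same seed always yields the same 32 bytes
  expect(gen32Bytes(42)).toStrictEqual(gen32Bytes(42))

  // equalNodes1_0 throws on any mismatch; a node trivially equals itself
  const { node } = getSampleMantarayNode1_0()
  expect(() => equalNodes1_0(node, node)).not.toThrow()
})
```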