From 30ef2fa8dde9cfeef8b88c190c7976c1d3379fa2 Mon Sep 17 00:00:00 2001 From: Kaung Myat Thu <2962000kaungmyatthu@gmail.com> Date: Thu, 6 Jun 2024 23:31:38 -0700 Subject: [PATCH] GOLD-101 stringify migration (#25) * feat: migrate to unified stringify standard lib * fix: avoid build path import introduced by prior commit * fix(test): added missing replacements to safe stringify and code removal * fix(json): fixed deserialisation function * feat(socket): using string to decode socket response from server * fix: add parsing/stringify for bigint in fastify * fix: customSerializerPlugin + custom parse for `getJson` and `postJson` * fix(server): serialisation hook simplification * fix(logs): simplified logs and removed unused * Update @shardus/types to version 1.2.13 * fix(rename): crypto util stringify indentifier rename * 3.4.18-0 * 3.4.18 --------- Co-authored-by: Arham Jain Co-authored-by: CombatPug Co-authored-by: Jintu Das --- package-lock.json | 12 +- package.json | 4 +- scripts/api_tester.ts | 3 +- scripts/archiver_data_patcher.ts | 7 +- scripts/archiver_data_sync_check.ts | 5 +- scripts/create_shut_down_cycle.ts | 2 +- scripts/repair_missing_cycle.ts | 2 +- scripts/update_network_account.ts | 2 +- scripts/verify_account_hash.ts | 2 +- src/API.ts | 14 +- src/Config.ts | 7 +- src/Crypto.ts | 8 +- src/Data/AccountDataProvider.ts | 2 +- src/Data/Collector.ts | 11 +- src/Data/Data.ts | 230 +++++++++++++------------ src/LostArchivers.ts | 2 +- src/P2P.ts | 20 ++- src/ShardFunctions.ts | 10 +- src/Utils.ts | 17 +- src/archivedCycle/Gossip.ts | 3 +- src/archivedCycle/StateMetaData.ts | 88 +++++----- src/profiler/StringifyReduce.ts | 8 +- src/profiler/memoryReporting.ts | 3 +- src/server.ts | 17 +- src/shardeum/verifyAppReceiptData.ts | 13 +- src/test/dataSync/mulitpleArchivers.ts | 5 +- src/utils/serialization.ts | 57 +----- src/utils/stringify.ts | 198 --------------------- 28 files changed, 271 insertions(+), 481 deletions(-) delete mode 100644 src/utils/stringify.ts diff --git a/package-lock.json b/package-lock.json index 72ea170b..a26531f2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,19 +1,19 @@ { "name": "@shardus/archiver", - "version": "3.4.17", + "version": "3.4.18", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@shardus/archiver", - "version": "3.4.17", + "version": "3.4.18", "license": "ISC", "dependencies": { "@fastify/cors": "^8.2.0", "@fastify/rate-limit": "^7.6.0", "@shardus/archiver-discovery": "^1.1.0", "@shardus/crypto-utils": "4.1.3", - "@shardus/types": "1.2.10", + "@shardus/types": "1.2.13", "deepmerge": "^4.2.2", "fastify": "4.12.0", "log4js": "^6.3.0", @@ -619,9 +619,9 @@ "integrity": "sha512-QCCHm15dmEFkH+TUMUNMT/iCGCsjOR8z6/5AZasG7Gsu9CTL85V4L+Ny/SGjKzM9sdi1Vz8XARy96r+slUZBOg==" }, "node_modules/@shardus/types": { - "version": "1.2.10", - "resolved": "https://registry.npmjs.org/@shardus/types/-/types-1.2.10.tgz", - "integrity": "sha512-5RlT3fN9Ea0x34LIpi4ouXPnj3PjmjjfVDf+WQx6pJGtKcGiwkc+jvZF6ERivgiUF5Xjg1aUHDJVScmetn1flA==" + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/@shardus/types/-/types-1.2.13.tgz", + "integrity": "sha512-rmpZzxNEmMpq1PGeVomkbnK9poBPyL027UNNmQFFT5LWsx1Rr6IJPcZilrVpEm4G592sFcdrJU0XkTUT6J2kaA==" }, "node_modules/@sindresorhus/is": { "version": "4.6.0", diff --git a/package.json b/package.json index 93c57616..3a088d4d 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@shardus/archiver", - "version": "3.4.17", + "version": "3.4.18", "engines": { "node": "18.16.1" }, @@ -78,7 
+78,7 @@ "@fastify/rate-limit": "^7.6.0", "@shardus/archiver-discovery": "^1.1.0", "@shardus/crypto-utils": "4.1.3", - "@shardus/types": "1.2.10", + "@shardus/types": "1.2.13", "deepmerge": "^4.2.2", "fastify": "4.12.0", "log4js": "^6.3.0", diff --git a/scripts/api_tester.ts b/scripts/api_tester.ts index 9a3f57e7..c308eb33 100644 --- a/scripts/api_tester.ts +++ b/scripts/api_tester.ts @@ -2,6 +2,7 @@ import * as crypto from '@shardus/crypto-utils' import fetch from 'node-fetch' import { join } from 'path' import { config, overrideDefaultConfig } from '../src/Config' +import { Utils as StringUtils } from '@shardus/types' const configFile = join(process.cwd(), 'archiver-config.json') overrideDefaultConfig(configFile) @@ -27,7 +28,7 @@ fetch(`${ARCHIVER_URL}/totalData`, { // fetch(`${ARCHIVER_URL}/receipt`, { // fetch(`${ARCHIVER_URL}/account`, { method: 'post', - body: JSON.stringify(data), + body: StringUtils.safeStringify(data), headers: { 'Content-Type': 'application/json' }, timeout: 2000, }) diff --git a/scripts/archiver_data_patcher.ts b/scripts/archiver_data_patcher.ts index 1547bd11..b925abdf 100644 --- a/scripts/archiver_data_patcher.ts +++ b/scripts/archiver_data_patcher.ts @@ -15,6 +15,7 @@ import * as Crypto from '../src/Crypto' import { join } from 'path' import * as Logger from '../src/Logger' import { startSaving } from '../src/saveConsoleOutput' +import { Utils as StringUtils } from '@shardus/types' const { MAX_RECEIPTS_PER_REQUEST, MAX_BETWEEN_CYCLES_PER_REQUEST, @@ -55,7 +56,7 @@ const runProgram = async (): Promise => { Crypto.setCryptoHashKey(hashKey) let logsConfig try { - logsConfig = JSON.parse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) + logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) } catch (err) { console.log('Failed to parse archiver log file:', err) } @@ -93,7 +94,7 @@ const runProgram = async (): Promise => { const downloadedReceipts = downloadedReceiptCountByCycles.receipts.filter((d) => d.cycle === j) const existingReceipts = receiptsCountByCycles.filter((d) => d.cycle === j) // console.log(j, downloadedReceipts, existingReceipts) - if (JSON.stringify(downloadedReceipts) !== JSON.stringify(existingReceipts)) { + if (StringUtils.safeStringify(downloadedReceipts) !== StringUtils.safeStringify(existingReceipts)) { console.log('Unmatched', j, downloadedReceipts, existingReceipts) const receipts = await fetchDataForCycle(archiver, DataType.RECEIPT, j) console.log('Downloaded receipts for cycle', j, ' -> ', receipts.length) @@ -129,7 +130,7 @@ const runProgram = async (): Promise => { ) const existingOriginalTxsData = originalTxsDataCountByCycles.filter((d) => d.cycle === j) // console.log(j, downloadedOriginalTxsData, existingOriginalTxsData) - if (JSON.stringify(downloadedOriginalTxsData) !== JSON.stringify(existingOriginalTxsData)) { + if (StringUtils.safeStringify(downloadedOriginalTxsData) !== StringUtils.safeStringify(existingOriginalTxsData)) { console.log('Unmatched', j, downloadedOriginalTxsData, existingOriginalTxsData) const originalTxsData = await fetchDataForCycle(archiver, DataType.ORIGINAL_TX_DATA, j) if (originalTxsData) { diff --git a/scripts/archiver_data_sync_check.ts b/scripts/archiver_data_sync_check.ts index 861de917..0d5555a4 100644 --- a/scripts/archiver_data_sync_check.ts +++ b/scripts/archiver_data_sync_check.ts @@ -67,7 +67,10 @@ const runProgram = async (): Promise => { } // console.dir(responses, { depth: null }) // save to file - 
writeFileSync(`archiver_${archiverInfo}_${startCycle}_${endCycle}_${URL}.json`, JSON.stringify(responses)) + writeFileSync( + `archiver_${archiverInfo}_${startCycle}_${endCycle}_${URL}.json`, + StringUtils.safeStringify(responses) + ) } } diff --git a/scripts/create_shut_down_cycle.ts b/scripts/create_shut_down_cycle.ts index b0b235d3..ed793ee6 100644 --- a/scripts/create_shut_down_cycle.ts +++ b/scripts/create_shut_down_cycle.ts @@ -41,7 +41,7 @@ const runProgram = async (): Promise => { Crypto.setCryptoHashKey(hashKey) let logsConfig try { - logsConfig = JSON.parse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) + logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) } catch (err) { console.log('Failed to parse archiver log file:', err) } diff --git a/scripts/repair_missing_cycle.ts b/scripts/repair_missing_cycle.ts index 921941df..43188aea 100644 --- a/scripts/repair_missing_cycle.ts +++ b/scripts/repair_missing_cycle.ts @@ -20,7 +20,7 @@ const start = async (): Promise => { Crypto.setCryptoHashKey(hashKey) let logsConfig try { - logsConfig = JSON.parse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) + logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) } catch (err) { console.log('Failed to parse archiver log file:', err) } diff --git a/scripts/update_network_account.ts b/scripts/update_network_account.ts index 86bf921b..b72c5974 100644 --- a/scripts/update_network_account.ts +++ b/scripts/update_network_account.ts @@ -24,7 +24,7 @@ const runProgram = async (): Promise => { Crypto.setCryptoHashKey(hashKey) let logsConfig try { - logsConfig = JSON.parse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) + logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) } catch (err) { console.log('Failed to parse archiver log file:', err) } diff --git a/scripts/verify_account_hash.ts b/scripts/verify_account_hash.ts index d14ffe97..5eaf8686 100644 --- a/scripts/verify_account_hash.ts +++ b/scripts/verify_account_hash.ts @@ -20,7 +20,7 @@ const runProgram = async (): Promise => { Crypto.setCryptoHashKey(hashKey) let logsConfig try { - logsConfig = JSON.parse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) + logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) } catch (err) { console.log('Failed to parse archiver log file:', err) } diff --git a/src/API.ts b/src/API.ts index 74f8122f..202f7997 100644 --- a/src/API.ts +++ b/src/API.ts @@ -27,6 +27,7 @@ import * as AccountDataProvider from './Data/AccountDataProvider' import { getGlobalNetworkAccount } from './GlobalAccount' import { cycleRecordWithShutDownMode } from './Data/Cycles' import { isDebugMiddleware } from './DebugMode' +import { Utils as StringUtils } from '@shardus/types' const { version } = require('../package.json') // eslint-disable-line @typescript-eslint/no-var-requires const TXID_LENGTH = 64 @@ -299,7 +300,7 @@ export function registerRoutes(server: FastifyInstance { const gossipPayload = _request.body - if (config.VERBOSE) Logger.mainLogger.debug('Gossip Data received', JSON.stringify(gossipPayload)) + if (config.VERBOSE) + Logger.mainLogger.debug('Gossip Data received', StringUtils.safeStringify(gossipPayload)) const result = Collector.validateGossipData(gossipPayload) if (!result.success) { reply.send({ success: false, error: result.error }) @@ -855,7 +857,7 
@@ export function registerRoutes(server: FastifyInstance { const payload = _request.body as AccountDataProvider.AccountDataRequestSchema - if (config.VERBOSE) Logger.mainLogger.debug('Account Data received', JSON.stringify(payload)) + if (config.VERBOSE) Logger.mainLogger.debug('Account Data received', StringUtils.safeStringify(payload)) const result = AccountDataProvider.validateAccountDataRequest(payload) // Logger.mainLogger.debug('Account Data validation result', result) if (!result.success) { @@ -873,7 +875,7 @@ export function registerRoutes(server: FastifyInstance { const payload = _request.body as AccountDataProvider.AccountDataByListRequestSchema - if (config.VERBOSE) Logger.mainLogger.debug('Account Data By List received', JSON.stringify(payload)) + if (config.VERBOSE) Logger.mainLogger.debug('Account Data By List received', StringUtils.safeStringify(payload)) const result = AccountDataProvider.validateAccountDataByListRequest(payload) // Logger.mainLogger.debug('Account Data By List validation result', result) if (!result.success) { @@ -891,7 +893,7 @@ export function registerRoutes(server: FastifyInstance { const payload = _request.body as AccountDataProvider.GlobalAccountReportRequestSchema - if (config.VERBOSE) Logger.mainLogger.debug('Global Account Report received', JSON.stringify(payload)) + if (config.VERBOSE) Logger.mainLogger.debug('Global Account Report received', StringUtils.safeStringify(payload)) const result = AccountDataProvider.validateGlobalAccountReportRequest(payload) // Logger.mainLogger.debug('Global Account Report validation result', result) if (!result.success) { @@ -1094,7 +1096,7 @@ export function registerRoutes(server: FastifyInstance { const gossipMessage = _request.body - Logger.mainLogger.debug('Gossip received', JSON.stringify(gossipMessage)) + Logger.mainLogger.debug('Gossip received', StringUtils.safeStringify(gossipMessage)) addHashesGossip(gossipMessage.sender, gossipMessage.data) const res = Crypto.sign({ success: true, diff --git a/src/Config.ts b/src/Config.ts index b0a7f920..c5789387 100644 --- a/src/Config.ts +++ b/src/Config.ts @@ -3,6 +3,7 @@ import * as Logger from './Logger' import * as merge from 'deepmerge' import * as minimist from 'minimist' import { join } from 'path' +import { Utils as StringUtils } from '@shardus/types' export interface Config { [index: string]: object | string | number | boolean @@ -106,7 +107,7 @@ let config: Config = { }, cycleRecordsCache: { enabled: false, - } + }, } // Override default config params from config file, env vars, and cli args export async function overrideDefaultConfig(file: string): Promise { @@ -116,7 +117,7 @@ export async function overrideDefaultConfig(file: string): Promise { // Override config from config file try { // eslint-disable-next-line security/detect-non-literal-fs-filename - const fileConfig = JSON.parse(fs.readFileSync(file, { encoding: 'utf8' })) + const fileConfig = StringUtils.safeJsonParse(fs.readFileSync(file, { encoding: 'utf8' })) const overwriteMerge = (target: [], source: []): [] => source config = merge(config, fileConfig, { arrayMerge: overwriteMerge }) } catch (err) { @@ -142,7 +143,7 @@ export async function overrideDefaultConfig(file: string): Promise { try { const parameterStr = env[param] if (parameterStr) { - const parameterObj = JSON.parse(parameterStr) + const parameterObj = StringUtils.safeJsonParse(parameterStr) config[param] = parameterObj } } catch (e) { diff --git a/src/Crypto.ts b/src/Crypto.ts index 2112bd5d..7931b6ab 100644 --- a/src/Crypto.ts +++ 
b/src/Crypto.ts @@ -1,13 +1,13 @@ import * as core from '@shardus/crypto-utils' import { SignedObject, TaggedObject, publicKey, curvePublicKey, sharedKey } from '@shardus/crypto-utils' +import { Utils as StringUtils } from '@shardus/types' import * as State from './State' -import { cryptoStringify } from './utils/stringify' // Crypto initialization fns export function setCryptoHashKey(hashkey: string): void { core.init(hashkey) - core.setCustomStringifier(cryptoStringify, 'shardus_crypto_stringify') + core.setCustomStringifier(StringUtils.safeStringify, 'shardus_safeStringify') } export const hashObj = core.hashObj @@ -16,7 +16,7 @@ export const hashObj = core.hashObj export type SignedMessage = SignedObject export function sign(obj: T): T & SignedObject { - const objCopy = JSON.parse(core.stringify(obj)) + const objCopy = StringUtils.safeJsonParse(core.stringify(obj)) core.signObj(objCopy, State.getSecretKey(), State.getNodeInfo().publicKey) return objCopy } @@ -55,7 +55,7 @@ export function getOrCreateSharedKey(pk: publicKey): sharedKey { export function tag(obj: T, recipientPk: publicKey): T & TaggedMessage { const sharedKey = getOrCreateSharedKey(recipientPk) - const objCopy = JSON.parse(core.stringify(obj)) + const objCopy = StringUtils.safeJsonParse(core.stringify(obj)) objCopy.publicKey = State.getNodeInfo().publicKey core.tagObj(objCopy, sharedKey) return objCopy diff --git a/src/Data/AccountDataProvider.ts b/src/Data/AccountDataProvider.ts index 1214acbc..e767970c 100644 --- a/src/Data/AccountDataProvider.ts +++ b/src/Data/AccountDataProvider.ts @@ -231,7 +231,7 @@ export const provideAccountDataRequest = async ( } } delta = tsEnd - highestTs - // Logger.mainLogger.debug('Account Data received', JSON.stringify(payload)) + // Logger.mainLogger.debug('Account Data received', StringUtils.safeStringify(payload)) // Logger.mainLogger.debug( // 'delta ' + delta, // 'tsEnd ' + tsEnd, diff --git a/src/Data/Collector.ts b/src/Data/Collector.ts index 86f9a5d6..96df5409 100644 --- a/src/Data/Collector.ts +++ b/src/Data/Collector.ts @@ -29,6 +29,7 @@ import { ConsensusNodeInfo } from '../NodeList' import { verifyAccountHash } from '../shardeum/calculateAccountHash' import { verifyAppReceiptData } from '../shardeum/verifyAppReceiptData' import { Cycle as DbCycle } from '../dbstore/types' +import { Utils as StringUtils } from '@shardus/types' export let storingAccountData = false const processedReceiptsMap: Map = new Map() @@ -227,7 +228,7 @@ const isReceiptRobust = async ( Logger.mainLogger.error( `The receipt validation failed from robustQuery nodes ${receipt.tx.txId} , ${receipt.cycle}, ${receipt.tx.timestamp}` ) - Logger.mainLogger.error(JSON.stringify(robustQueryReceipt), JSON.stringify(fullReceipt)) + Logger.mainLogger.error(StringUtils.safeStringify(robustQueryReceipt), StringUtils.safeStringify(fullReceipt)) } } Logger.mainLogger.error( @@ -611,7 +612,7 @@ export const storeReceiptData = async ( }) if (config.dataLogWrite && ReceiptLogWriter) ReceiptLogWriter.writeToLog( - `${JSON.stringify({ + `${StringUtils.safeStringify({ ...receipt, receiptId: tx.txId, timestamp: tx.timestamp, @@ -775,10 +776,10 @@ export const storeCycleData = async (cycles: P2PTypes.CycleCreatorTypes.CycleDat cycleMarker: cycleRecord.marker, cycleRecord, } - if (config.dataLogWrite && CycleLogWriter) CycleLogWriter.writeToLog(`${JSON.stringify(cycleObj)}\n`) + if (config.dataLogWrite && CycleLogWriter) CycleLogWriter.writeToLog(`${StringUtils.safeStringify(cycleObj)}\n`) const cycleExist = await 
queryCycleByMarker(cycleObj.cycleMarker) if (cycleExist) { - if (JSON.stringify(cycleObj) !== JSON.stringify(cycleExist)) + if (StringUtils.safeStringify(cycleObj) !== StringUtils.safeStringify(cycleExist)) await updateCycle(cycleObj.cycleMarker, cycleObj) } else { // await Cycle.insertCycle(cycleObj) @@ -885,7 +886,7 @@ export const storeOriginalTxData = async ( if (missingOriginalTxsMap.has(txId)) missingOriginalTxsMap.delete(txId) if (config.dataLogWrite && OriginalTxDataLogWriter) - OriginalTxDataLogWriter.writeToLog(`${JSON.stringify(originalTxData)}\n`) + OriginalTxDataLogWriter.writeToLog(`${StringUtils.safeStringify(originalTxData)}\n`) combineOriginalTxsData.push(originalTxData) txDataList.push({ txId, timestamp }) if (combineOriginalTxsData.length >= bucketSize) { diff --git a/src/Data/Data.ts b/src/Data/Data.ts index 7bd3af30..f98500c3 100644 --- a/src/Data/Data.ts +++ b/src/Data/Data.ts @@ -41,6 +41,7 @@ import { Transaction } from '../dbstore/transactions' import { AccountCopy } from '../dbstore/accounts' import { getJson } from '../P2P' import { robustQuery } from '../Utils' +import { Utils as StringUtils } from '@shardus/types' export const socketClients: Map = new Map() export let combineAccountsData = { @@ -210,132 +211,131 @@ export function initSocketClient(node: NodeList.ConsensusNodeInfo): void { Logger.mainLogger.debug(`Connection request is refused by the consensor node ${node.ip}:${node.port}`) }) - socketClient.on( - 'DATA', - (newData: DataResponse & Crypto.TaggedMessage) => { - if (!newData || !newData.responses) return - if (newData.recipient !== State.getNodeInfo().publicKey) { - Logger.mainLogger.debug('This data is not meant for this archiver') - return - } + socketClient.on('DATA', (data: string) => { + const newData: DataResponse & Crypto.TaggedMessage = + StringUtils.safeJsonParse(data) + if (!newData || !newData.responses) return + if (newData.recipient !== State.getNodeInfo().publicKey) { + Logger.mainLogger.debug('This data is not meant for this archiver') + return + } - // If tag is invalid, dont keepAlive, END - if (Crypto.authenticate(newData) === false) { - Logger.mainLogger.debug('This data cannot be authenticated') - unsubscribeDataSender(node.publicKey) + // If tag is invalid, dont keepAlive, END + if (Crypto.authenticate(newData) === false) { + Logger.mainLogger.debug('This data cannot be authenticated') + unsubscribeDataSender(node.publicKey) + return + } + + if (config.experimentalSnapshot) { + // Get sender entry + let sender = dataSenders.get(newData.publicKey) + // If no sender entry, remove publicKey from senders, END + if (!sender) { + Logger.mainLogger.error('This sender is not in the subscribed nodes list', newData.publicKey) + // unsubscribeDataSender(newData.publicKey) return } + // Clear senders contactTimeout, if it has one + if (sender.contactTimeout) { + if (config.VERBOSE) Logger.mainLogger.debug('Clearing contact timeout.') + clearTimeout(sender.contactTimeout) + sender.contactTimeout = null + nestedCountersInstance.countEvent('archiver', 'clear_contact_timeout') + } - if (config.experimentalSnapshot) { - // Get sender entry - let sender = dataSenders.get(newData.publicKey) - // If no sender entry, remove publicKey from senders, END - if (!sender) { - Logger.mainLogger.error('This sender is not in the subscribed nodes list', newData.publicKey) - // unsubscribeDataSender(newData.publicKey) - return - } - // Clear senders contactTimeout, if it has one - if (sender.contactTimeout) { - if (config.VERBOSE) 
Logger.mainLogger.debug('Clearing contact timeout.') - clearTimeout(sender.contactTimeout) - sender.contactTimeout = null - nestedCountersInstance.countEvent('archiver', 'clear_contact_timeout') - } + if (config.VERBOSE) + console.log('DATA', sender.nodeInfo.publicKey, sender.nodeInfo.ip, sender.nodeInfo.port) + if (newData.responses && newData.responses.ORIGINAL_TX_DATA) { if (config.VERBOSE) - console.log('DATA', sender.nodeInfo.publicKey, sender.nodeInfo.ip, sender.nodeInfo.port) - - if (newData.responses && newData.responses.ORIGINAL_TX_DATA) { - if (config.VERBOSE) - Logger.mainLogger.debug( - 'ORIGINAL_TX_DATA', - sender.nodeInfo.publicKey, - sender.nodeInfo.ip, - sender.nodeInfo.port, - newData.responses.ORIGINAL_TX_DATA.length - ) - storeOriginalTxData( - newData.responses.ORIGINAL_TX_DATA, - sender.nodeInfo.ip + ':' + sender.nodeInfo.port, - saveOnlyGossipData - ) - } - if (newData.responses && newData.responses.RECEIPT) { - if (config.VERBOSE) - Logger.mainLogger.debug( - 'RECEIPT', - sender.nodeInfo.publicKey, - sender.nodeInfo.ip, - sender.nodeInfo.port, - newData.responses.RECEIPT.length - ) - storeReceiptData( - newData.responses.RECEIPT, - sender.nodeInfo.ip + ':' + sender.nodeInfo.port, - true, - saveOnlyGossipData - ) - } - if (newData.responses && newData.responses.CYCLE) { - collectCycleData(newData.responses.CYCLE, sender.nodeInfo.ip + ':' + sender.nodeInfo.port) - } - if (newData.responses && newData.responses.ACCOUNT) { - console.log( - 'RECEIVED ACCOUNTS DATA', + Logger.mainLogger.debug( + 'ORIGINAL_TX_DATA', sender.nodeInfo.publicKey, sender.nodeInfo.ip, - sender.nodeInfo.port + sender.nodeInfo.port, + newData.responses.ORIGINAL_TX_DATA.length ) + storeOriginalTxData( + newData.responses.ORIGINAL_TX_DATA, + sender.nodeInfo.ip + ':' + sender.nodeInfo.port, + saveOnlyGossipData + ) + } + if (newData.responses && newData.responses.RECEIPT) { + if (config.VERBOSE) Logger.mainLogger.debug( - 'RECEIVED ACCOUNTS DATA', + 'RECEIPT', sender.nodeInfo.publicKey, sender.nodeInfo.ip, - sender.nodeInfo.port + sender.nodeInfo.port, + newData.responses.RECEIPT.length ) - nestedCountersInstance.countEvent('genesis', 'accounts', 1) - if (!forwardGenesisAccounts) { - console.log('Genesis Accounts To Sycn', newData.responses.ACCOUNT) - Logger.mainLogger.debug('Genesis Accounts To Sycn', newData.responses.ACCOUNT) - syncGenesisAccountsFromConsensor(newData.responses.ACCOUNT, sender.nodeInfo) - } else { - if (storingAccountData) { - console.log('Storing Data') - let newCombineAccountsData = { ...combineAccountsData } - if (newData.responses.ACCOUNT.accounts) - newCombineAccountsData.accounts = [ - ...newCombineAccountsData.accounts, - ...newData.responses.ACCOUNT.accounts, - ] - if (newData.responses.ACCOUNT.receipts) - newCombineAccountsData.receipts = [ - ...newCombineAccountsData.receipts, - ...newData.responses.ACCOUNT.receipts, - ] - combineAccountsData = { ...newCombineAccountsData } - newCombineAccountsData = { - accounts: [], - receipts: [], - } - } else storeAccountData(newData.responses.ACCOUNT) - } + storeReceiptData( + newData.responses.RECEIPT, + sender.nodeInfo.ip + ':' + sender.nodeInfo.port, + true, + saveOnlyGossipData + ) + } + if (newData.responses && newData.responses.CYCLE) { + collectCycleData(newData.responses.CYCLE, sender.nodeInfo.ip + ':' + sender.nodeInfo.port) + } + if (newData.responses && newData.responses.ACCOUNT) { + console.log( + 'RECEIVED ACCOUNTS DATA', + sender.nodeInfo.publicKey, + sender.nodeInfo.ip, + sender.nodeInfo.port + ) + 
Logger.mainLogger.debug( + 'RECEIVED ACCOUNTS DATA', + sender.nodeInfo.publicKey, + sender.nodeInfo.ip, + sender.nodeInfo.port + ) + nestedCountersInstance.countEvent('genesis', 'accounts', 1) + if (!forwardGenesisAccounts) { + console.log('Genesis Accounts To Sycn', newData.responses.ACCOUNT) + Logger.mainLogger.debug('Genesis Accounts To Sycn', newData.responses.ACCOUNT) + syncGenesisAccountsFromConsensor(newData.responses.ACCOUNT, sender.nodeInfo) + } else { + if (storingAccountData) { + console.log('Storing Data') + let newCombineAccountsData = { ...combineAccountsData } + if (newData.responses.ACCOUNT.accounts) + newCombineAccountsData.accounts = [ + ...newCombineAccountsData.accounts, + ...newData.responses.ACCOUNT.accounts, + ] + if (newData.responses.ACCOUNT.receipts) + newCombineAccountsData.receipts = [ + ...newCombineAccountsData.receipts, + ...newData.responses.ACCOUNT.receipts, + ] + combineAccountsData = { ...newCombineAccountsData } + newCombineAccountsData = { + accounts: [], + receipts: [], + } + } else storeAccountData(newData.responses.ACCOUNT) } + } - // Set new contactTimeout for sender. Postpone sender removal because data is still received from consensor - if (currentCycleDuration > 0) { - nestedCountersInstance.countEvent('archiver', 'postpone_contact_timeout') - // To make sure that the sender is still in the subscribed list - sender = dataSenders.get(newData.publicKey) - if (sender) - sender.contactTimeout = createContactTimeout( - sender.nodeInfo.publicKey, - 'This timeout is created after processing data' - ) - } - return + // Set new contactTimeout for sender. Postpone sender removal because data is still received from consensor + if (currentCycleDuration > 0) { + nestedCountersInstance.countEvent('archiver', 'postpone_contact_timeout') + // To make sure that the sender is still in the subscribed list + sender = dataSenders.get(newData.publicKey) + if (sender) + sender.contactTimeout = createContactTimeout( + sender.nodeInfo.publicKey, + 'This timeout is created after processing data' + ) } + return } - ) + }) } export function collectCycleData( @@ -413,7 +413,7 @@ export function collectCycleData( key, // marker /* eslint-disable security/detect-object-injection */ receivedCycleTracker[counter][key]['receivedTimes'], - logCycle ? JSON.stringify(receivedCycleTracker[counter][key]['senderNodes']) : '', + logCycle ? StringUtils.safeStringify(receivedCycleTracker[counter][key]['senderNodes']) : '', logCycle ? 
receivedCycleTracker[counter][key] : '' /* eslint-enable security/detect-object-injection */ ) @@ -810,7 +810,7 @@ export async function sendLeaveRequest(nodes: NodeList.ConsensusNodeInfo[]): Pro const promises = nodes.map((node) => fetch(`http://${node.ip}:${node.port}/leavingarchivers`, { method: 'post', - body: JSON.stringify(leaveRequest), + body: StringUtils.safeStringify(leaveRequest), headers: { 'Content-Type': 'application/json' }, timeout: 2 * 1000, // 2s timeout }).then((res) => res.json()) @@ -862,7 +862,7 @@ export async function sendActiveRequest(): Promise { const promises = nodes.map((node) => fetch(`http://${node.ip}:${node.port}/activearchiver`, { method: 'post', - body: JSON.stringify(activeRequest), + body: StringUtils.safeStringify(activeRequest), headers: { 'Content-Type': 'application/json' }, timeout: 2 * 1000, // 2s timeout }).then((res) => res.json()) @@ -2067,7 +2067,11 @@ export async function compareWithOldCyclesData(lastCycleCounter = 0): Promise(record: R): void { const debug = (message: any, ...args: any[]): void => Logger.mainLogger.debug(message, ...args) debug('>> handleLostArchivers()') debug(' config.ARCHIVER_PUBLIC_KEY: ' + config.ARCHIVER_PUBLIC_KEY) - // debug(' record: ' + JSON.stringify(record, null, 2)) + // debug(' record: ' + StringUtils.safeStringify(record, null, 2)) if (record && record.refutedArchivers && record.lostArchivers && record.removedArchivers) { if (record.refutedArchivers.some((publicKey) => publicKey === config.ARCHIVER_PUBLIC_KEY)) { diff --git a/src/P2P.ts b/src/P2P.ts index 74e4c601..cdfb5e04 100644 --- a/src/P2P.ts +++ b/src/P2P.ts @@ -7,6 +7,7 @@ import fetch from 'node-fetch' import { P2P as P2PTypes } from '@shardus/types' import { RequestInit, Response } from 'node-fetch' import { SignedObject } from '@shardus/crypto-utils' +import { Utils as StringUtils } from '@shardus/types' // eslint-disable-next-line @typescript-eslint/no-var-requires const { version } = require('../package.json') @@ -82,12 +83,18 @@ export async function postJson( try { const res = await fetch(url, { method: 'post', - body: JSON.stringify(body), + body: StringUtils.safeStringify(body), headers: { 'Content-Type': 'application/json' }, timeout: timeoutInSecond * 1000, }) if (res.ok) { - return await res.json() + const text = await res.text() + try { + return StringUtils.safeJsonParse(text) + } catch (parseError) { + console.warn(`getJson failed: invalid JSON response url: ${url} parseError: ${parseError}`) + return null + } } else { console.warn('postJson failed: got bad response') console.warn(res.headers) @@ -108,9 +115,16 @@ export async function getJson(url: string, timeoutInSecond = 5): Promise(fallback: Type, json: string, msg?: string): Type { - if (typeof json === 'object' && json !== null) { - return json - } - try { - return JSON.parse(json) - } catch (err) { - console.warn(msg ? msg : err) - return fallback - } -} - // From: https://stackoverflow.com/a/19270021 export function getRandom(arr: T[], n: number): T[] { let len = arr.length @@ -276,7 +265,7 @@ export async function robustQuery( } } if (finalResult) { - // Logger.mainLogger.debug(`In robustQuery stopping since we got a finalResult:${JSON.stringify(finalResult)}`) + // Logger.mainLogger.debug(`In robustQuery stopping since we got a finalResult:${StringUtils.safeStringify(finalResult)}`) return finalResult } else { // TODO: We return the item that had the most nodes reporting it. 
However, the caller should know @@ -333,7 +322,7 @@ export const deepCopy = (obj: T): T => { if (typeof obj !== 'object') { throw Error('Given element is not of type object.') } - return JSON.parse(JSON.stringify(obj)) + return StringUtils.safeJsonParse(StringUtils.safeStringify(obj)) } export const insertSorted = function (arr: T[], item: T, comparator?: (a: T, b: T) => number): void { diff --git a/src/archivedCycle/Gossip.ts b/src/archivedCycle/Gossip.ts index 443d11c8..5d652720 100644 --- a/src/archivedCycle/Gossip.ts +++ b/src/archivedCycle/Gossip.ts @@ -4,6 +4,7 @@ import * as State from '../State' import * as P2P from '../P2P' import { config } from '../Config' import * as Logger from '../Logger' +import { Utils as StringUtils } from '@shardus/types' interface HashItem { counter: number @@ -63,7 +64,7 @@ export async function sendGossip(type: string, payload: Record) try { Logger.mainLogger.debug( - `GossipingIn ${type} request to these nodes: ${JSON.stringify( + `GossipingIn ${type} request to these nodes: ${StringUtils.safeStringify( recipients.map((node) => node.ip + ':' + node.port + `/gossip-${type}`) )}` ) diff --git a/src/archivedCycle/StateMetaData.ts b/src/archivedCycle/StateMetaData.ts index e161173f..1a34fc96 100644 --- a/src/archivedCycle/StateMetaData.ts +++ b/src/archivedCycle/StateMetaData.ts @@ -35,6 +35,7 @@ import { profilerInstance } from '../profiler/profiler' // Socket modules export let socketServer: SocketIO.Server import * as ioclient from 'socket.io-client' +import { Utils as StringUtils } from '@shardus/types' let socketClient: SocketIOClientStatic['Socket'] export const socketClients: Map = new Map() const socketConnectionsTracker: Map = new Map() @@ -150,57 +151,56 @@ export function initSocketClient(node: NodeList.ConsensusNodeInfo): void { socketConnectionsTracker.set(node.publicKey, 'disconnected') }) - socketClient.on( - 'DATA', - (newData: DataResponse & Crypto.TaggedMessage) => { - if (!newData || !newData.responses) return - if (newData.recipient !== State.getNodeInfo().publicKey) { - Logger.mainLogger.debug('This data is not meant for this archiver') - return - } + socketClient.on('DATA', (data: string) => { + const newData: DataResponse & Crypto.TaggedMessage = + StringUtils.safeJsonParse(data) + if (!newData || !newData.responses) return + if (newData.recipient !== State.getNodeInfo().publicKey) { + Logger.mainLogger.debug('This data is not meant for this archiver') + return + } - // If tag is invalid, dont keepAlive, END - if (Crypto.authenticate(newData) === false) { - Logger.mainLogger.debug('This data cannot be authenticated') - console.log('Unsubscribe 1', node.publicKey) - unsubscribeDataSender(node.publicKey) - return - } + // If tag is invalid, dont keepAlive, END + if (Crypto.authenticate(newData) === false) { + Logger.mainLogger.debug('This data cannot be authenticated') + console.log('Unsubscribe 1', node.publicKey) + unsubscribeDataSender(node.publicKey) + return + } - if (newData.responses.STATE_METADATA.length > 0) Logger.mainLogger.debug('New DATA', newData.responses) - else Logger.mainLogger.debug('State metadata is empty') + if (newData.responses.STATE_METADATA.length > 0) Logger.mainLogger.debug('New DATA', newData.responses) + else Logger.mainLogger.debug('State metadata is empty') - currentDataSender = newData.publicKey - if (newData.responses && newData.responses.STATE_METADATA) { - // Logger.mainLogger.debug('New DATA from consensor STATE_METADATA', newData.publicKey, newData.responses.STATE_METADATA) - // let hashArray: any 
= Gossip.convertStateMetadataToHashArray(newData.responses.STATE_METADATA[0]) - for (const stateMetadata of newData.responses.STATE_METADATA) { - StateMetaDataMap.set(stateMetadata.counter, stateMetadata) - Gossip.sendGossip('hashes', stateMetadata) - } + currentDataSender = newData.publicKey + if (newData.responses && newData.responses.STATE_METADATA) { + // Logger.mainLogger.debug('New DATA from consensor STATE_METADATA', newData.publicKey, newData.responses.STATE_METADATA) + // let hashArray: any = Gossip.convertStateMetadataToHashArray(newData.responses.STATE_METADATA[0]) + for (const stateMetadata of newData.responses.STATE_METADATA) { + StateMetaDataMap.set(stateMetadata.counter, stateMetadata) + Gossip.sendGossip('hashes', stateMetadata) } + } - socketServer.emit('DATA', newData) - const sender = dataSenders.get(newData.publicKey) - // If publicKey is not in dataSenders, dont keepAlive, END - if (!sender) { - Logger.mainLogger.debug('NO SENDER') - return - } + socketServer.emit('DATA', newData) + const sender = dataSenders.get(newData.publicKey) + // If publicKey is not in dataSenders, dont keepAlive, END + if (!sender) { + Logger.mainLogger.debug('NO SENDER') + return + } - // If unexpected data type from sender, dont keepAlive, END - const newDataTypes = Object.keys(newData.responses) - for (const type of newDataTypes as (keyof typeof P2PTypes.SnapshotTypes.TypeNames)[]) { - if (sender.types.includes(type) === false) { - Logger.mainLogger.debug( - `NEW DATA type ${type} not included in sender's types: ${JSON.stringify(sender.types)}` - ) - return - } + // If unexpected data type from sender, dont keepAlive, END + const newDataTypes = Object.keys(newData.responses) + for (const type of newDataTypes as (keyof typeof P2PTypes.SnapshotTypes.TypeNames)[]) { + if (sender.types.includes(type) === false) { + Logger.mainLogger.debug( + `NEW DATA type ${type} not included in sender's types: ${StringUtils.safeStringify(sender.types)}` + ) + return } - setImmediate(processData, newData) } - ) + setImmediate(processData, newData) + }) } export function createDataRequest( @@ -1189,7 +1189,7 @@ export async function compareWithOldCyclesData( // eslint-disable-next-line security/detect-object-injection const oldCycle = oldCycles[i] console.log(downloadedCycle, oldCycle) - if (JSON.stringify(downloadedCycle) !== JSON.stringify(oldCycle)) { + if (StringUtils.safeStringify(downloadedCycle) !== StringUtils.safeStringify(oldCycle)) { return { success, cycle, diff --git a/src/profiler/StringifyReduce.ts b/src/profiler/StringifyReduce.ts index 519da7ea..f0092ee8 100644 --- a/src/profiler/StringifyReduce.ts +++ b/src/profiler/StringifyReduce.ts @@ -1,3 +1,5 @@ +import { Utils as StringUtils } from "@shardus/types" + export const makeShortHash = (x: string, n = 4): string => { if (!x) { return x @@ -85,13 +87,13 @@ export const stringifyReduce = (val: unknown, isArrayProp?: boolean): string => if (str) { str += ',' } - str += JSON.stringify(key) + ':' + propVal + str += StringUtils.safeStringify(key) + ':' + propVal } i++ } return '{' + str + '}' } else { - return JSON.stringify(val) + return StringUtils.safeStringify(val) } } case 'function': @@ -99,7 +101,7 @@ export const stringifyReduce = (val: unknown, isArrayProp?: boolean): string => return isArrayProp ? 
null : undefined case 'string': { const reduced = makeShortHash(val) - return JSON.stringify(reduced) + return StringUtils.safeStringify(reduced) } default: { const n = Number(val) diff --git a/src/profiler/memoryReporting.ts b/src/profiler/memoryReporting.ts index c01dd0e2..56a7234c 100644 --- a/src/profiler/memoryReporting.ts +++ b/src/profiler/memoryReporting.ts @@ -7,6 +7,7 @@ import { getActiveNodeCount } from '../NodeList' import { spawn } from 'child_process' import * as process from 'process' import { isDebugMiddleware } from '../DebugMode' +import { Utils as StringUtils } from '@shardus/types' type CounterMap = Map interface CounterNode { @@ -232,7 +233,7 @@ class MemoryReporting { multiStats.max = this.roundTo3decimals(multiStats.max * 100) multiStats.avg = this.roundTo3decimals(multiStats.avg * 100) - this.addToReport('Process', 'CPU', `cpu: ${JSON.stringify(multiStats)}`, 1) + this.addToReport('Process', 'CPU', `cpu: ${StringUtils.safeStringify(multiStats)}`, 1) const report = resourceUsage() for (const [key, value] of Object.entries(report)) { diff --git a/src/server.ts b/src/server.ts index 7072edc5..80f81529 100644 --- a/src/server.ts +++ b/src/server.ts @@ -39,6 +39,7 @@ import * as Collector from './Data/Collector' import { loadGlobalAccounts, syncGlobalAccount } from './GlobalAccount' import { setShutdownCycleRecord, cycleRecordWithShutDownMode } from './Data/Cycles' import { registerRoutes } from './API' +import { Utils as StringUtils } from '@shardus/types' const configFile = join(process.cwd(), 'archiver-config.json') let logDir: string @@ -66,7 +67,7 @@ async function start(): Promise { } let logsConfig try { - logsConfig = JSON.parse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) + logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) } catch (err) { console.log('Failed to parse archiver log file:', err) } @@ -438,6 +439,20 @@ async function startServer(): Promise { allowList: ['127.0.0.1', '0.0.0.0'], // Excludes local IPs from rate limits }) + server.addContentTypeParser('application/json', { parseAs: 'string' }, (req, body, done) => { + try { + const jsonString = typeof body === 'string' ? 
body : body.toString('utf8') + done(null, StringUtils.safeJsonParse(jsonString)) + } catch (err) { + err.statusCode = 400 + done(err, undefined) + } + }) + + server.setReplySerializer((payload) => { + return StringUtils.safeStringify(payload) + }) + initProfiler(server) // Initialize the data log writer diff --git a/src/shardeum/verifyAppReceiptData.ts b/src/shardeum/verifyAppReceiptData.ts index 74e78c4d..803ccb88 100644 --- a/src/shardeum/verifyAppReceiptData.ts +++ b/src/shardeum/verifyAppReceiptData.ts @@ -2,6 +2,7 @@ import { config } from '../Config' import * as crypto from '../Crypto' import * as Logger from '../Logger' import { ArchiverReceipt, Receipt } from '../dbstore/receipts' +import { Utils as StringUtils } from '@shardus/types' export type ShardeumReceipt = object & { amountSpent: string @@ -74,16 +75,16 @@ export const verifyAppReceiptData = async ( if (existingShardeumReceipt.amountSpent !== '0x0') { Logger.mainLogger.error( `Success and failed receipts with gas charged`, - JSON.stringify(existingReceipt), - JSON.stringify(receipt) + StringUtils.safeStringify(existingReceipt), + StringUtils.safeStringify(receipt) ) } else result = { valid: true, needToSave: true } // Success receipt } else { if (existingShardeumReceipt.amountSpent !== '0x0' && newShardeumReceipt.amountSpent !== '0x0') { Logger.mainLogger.error( `Both failed receipts with gas charged`, - JSON.stringify(existingReceipt), - JSON.stringify(receipt) + StringUtils.safeStringify(existingReceipt), + StringUtils.safeStringify(receipt) ) } else if (newShardeumReceipt.amountSpent !== '0x0') { // Failed receipt with gas charged @@ -93,8 +94,8 @@ export const verifyAppReceiptData = async ( } else if (newShardeumReceipt.readableReceipt.status === 1) { Logger.mainLogger.error( `Duplicate success receipt`, - JSON.stringify(existingReceipt), - JSON.stringify(receipt) + StringUtils.safeStringify(existingReceipt), + StringUtils.safeStringify(receipt) ) } } diff --git a/src/test/dataSync/mulitpleArchivers.ts b/src/test/dataSync/mulitpleArchivers.ts index 41a61e0e..ba94f31f 100644 --- a/src/test/dataSync/mulitpleArchivers.ts +++ b/src/test/dataSync/mulitpleArchivers.ts @@ -1,4 +1,5 @@ import { getJson } from '../../P2P' +import { Utils as StringUtils } from '@shardus/types' interface ReceiptResponse { receipts: Array<{ cycle: number; receipts: unknown[] }> @@ -76,11 +77,11 @@ export async function checkCyclesDataBetweenArchivers(ip, numberOfArchivers): Pr for (let i = 0; i < expectedCycles.length; i++) { // eslint-disable-next-line security/detect-object-injection const cycleInfo = expectedCycles[i] - const cycleInfoToMatch = JSON.stringify(cycleInfo) + const cycleInfoToMatch = StringUtils.safeStringify(cycleInfo) for (let j = 1; j < archiverInfos.length; j++) { // console.log(cycleInfo.counter, dataInfos[archiverInfos[j]][i].counter) // eslint-disable-next-line security/detect-object-injection - if (cycleInfoToMatch !== JSON.stringify(dataInfos[archiverInfos[j]][i])) { + if (cycleInfoToMatch !== StringUtils.safeStringify(dataInfos[archiverInfos[j]][i])) { allCyclesAreMatched = false console.log(`Cycle ${cycleInfo.counter} is not matched between archivers!`) } diff --git a/src/utils/serialization.ts b/src/utils/serialization.ts index 6928154a..558f051f 100644 --- a/src/utils/serialization.ts +++ b/src/utils/serialization.ts @@ -1,10 +1,10 @@ -import { stringify } from './stringify' +import { Utils as StringUtils } from '@shardus/types' import { config } from '../Config' export function SerializeToJsonString(obj: object): 
string { try { - if (config.useSerialization) return stringify(obj, { bufferEncoding: 'base64' }) - else return JSON.stringify(obj) + if (config.useSerialization) return StringUtils.safeStringify(obj, { bufferEncoding: 'base64' }) + else return StringUtils.safeStringify(obj) } catch (e) { console.log('Error serializing object', e) console.log(obj) @@ -14,59 +14,10 @@ export function SerializeToJsonString(obj: object): string { export function DeSerializeFromJsonString(jsonString: string): T { try { - if (config.useSerialization) return JSON.parse(jsonString, base64BufferReviver) - else return JSON.parse(jsonString) + return StringUtils.safeJsonParse(jsonString) } catch (e) { console.log('Error deserializing object', e) console.log(jsonString) throw e } } - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function base64BufferReviver(key: string, value: any): unknown { - const originalObject = value - if ( - isObject(originalObject) && - Object.prototype.hasOwnProperty.call(originalObject, 'dataType') && - originalObject.dataType && - originalObject.dataType == 'bh' - ) { - return new Uint8Array(GetBufferFromField(originalObject, 'base64')) - // } else if (value && isHexStringWithoutPrefix(value) && value.length !== 42 && value.length !== 64) { - // console.log('hex string', value) - // return BigInt('0x' + value) - } else { - return value - } -} - -export const isObject = (val): boolean => { - if (val === null) { - return false - } - if (Array.isArray(val)) { - return false - } - return typeof val === 'function' || typeof val === 'object' -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export function GetBufferFromField(input: any, encoding?: 'base64' | 'hex'): Buffer { - switch (encoding) { - case 'base64': - return Buffer.from(input.data, 'base64') - default: - return Buffer.from(input) - } -} - -export function isHexStringWithoutPrefix(value: string, length?: number): boolean { - if (value && typeof value === 'string' && value.indexOf('0x') >= 0) return false // do not convert strings with 0x - // prefix - if (typeof value !== 'string' || !value.match(/^[0-9A-Fa-f]*$/)) return false - - if (typeof length !== 'undefined' && length > 0 && value.length !== 2 + 2 * length) return false - - return true -} diff --git a/src/utils/stringify.ts b/src/utils/stringify.ts deleted file mode 100644 index f226d015..00000000 --- a/src/utils/stringify.ts +++ /dev/null @@ -1,198 +0,0 @@ -const objToString = Object.prototype.toString -const objKeys = - Object.keys || - function (obj): string[] { - const keys = [] - for (const name in obj) { - keys.push(name) - } - return keys - } - -export interface stringifierOptions { - bufferEncoding: 'base64' | 'hex' | 'none' -} - -const defaultStringifierOptions: stringifierOptions = { - bufferEncoding: 'base64', -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export function stringify(val: any, options: stringifierOptions = defaultStringifierOptions): string { - const returnVal = stringifier(val, false, options) - if (returnVal !== undefined) { - return '' + returnVal - } - return '' -} - -function isUnit8Array(value: unknown): boolean { - return value instanceof Uint8Array -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function stringifier(val: any, isArrayProp: boolean, options: stringifierOptions): string | null | undefined { - let i, max, str, keys, key, propVal, toStr - if (val === true) { - return 'true' - } - if (val === false) { - return 'false' - } - if (isUnit8Array(val)) { 
- val = Buffer.from(val) - } - /* eslint-disable security/detect-object-injection */ - switch (typeof val) { - case 'object': - if (val === null) { - return null - } else if (val.toJSON && typeof val.toJSON === 'function') { - return stringifier(val.toJSON(), isArrayProp, options) - } else { - toStr = objToString.call(val) - if (toStr === '[object Array]') { - str = '[' - max = val.length - 1 - for (i = 0; i < max; i++) { - str += stringifier(val[i], true, options) + ',' - } - if (max > -1) { - str += stringifier(val[i], true, options) - } - return str + ']' - } else if (options.bufferEncoding !== 'none' && isBufferValue(toStr, val)) { - switch (options.bufferEncoding) { - case 'base64': - return JSON.stringify({ - data: Buffer.from(val['data']).toString('base64'), - dataType: 'bh', - }) - case 'hex': - return JSON.stringify({ - data: Buffer.from(val['data']).toString('hex'), - dataType: 'bh', - }) - } - } else if (toStr === '[object Object]') { - // only object is left - keys = objKeys(val) - if (keys.length > 1 && keys[0] === '0' && keys[1] === '1') { - // convert to unit8array - const unit8Array = new Uint8Array(Object.values(val)) - return stringifier(unit8Array, false, options) - } - keys = keys.sort() - max = keys.length - str = '' - i = 0 - while (i < max) { - key = keys[i] - propVal = stringifier(val[key], false, options) - if (propVal !== undefined) { - if (str) { - str += ',' - } - str += JSON.stringify(key) + ':' + propVal - } - i++ - } - return '{' + str + '}' - } else { - return JSON.stringify(val) - } - } - // eslint-disable-next-line no-fallthrough - case 'undefined': - return isArrayProp ? null : undefined - case 'string': - return JSON.stringify(val) - case 'bigint': - // Add some special identifier for bigint - // return JSON.stringify({__BigInt__: val.toString()}) - return JSON.stringify(val.toString(16)) - default: - return isFinite(val) ? val : null - } - /* eslint-enable security/detect-object-injection */ -} - -function isBufferValue(toStr, val: object): boolean { - return ( - toStr === '[object Object]' && - objKeys(val).length == 2 && - objKeys(val).includes('type') && - val['type'] == 'Buffer' - ) -} - -/* cryptoStringifier is a close version of default fast-stringify-json that works with BigInts */ -function cryptoStringifier(val, isArrayProp): string { - let i, max, str, keys, key, propVal, toStr - if (val === true) { - return 'true' - } - if (val === false) { - return 'false' - } - /* eslint-disable security/detect-object-injection */ - switch (typeof val) { - case 'object': - if (val === null) { - return null - } else if (val.toJSON && typeof val.toJSON === 'function') { - return cryptoStringifier(val.toJSON(), isArrayProp) - } else { - toStr = objToString.call(val) - if (toStr === '[object Array]') { - str = '[' - max = val.length - 1 - for (i = 0; i < max; i++) { - str += cryptoStringifier(val[i], true) + ',' - } - if (max > -1) { - str += cryptoStringifier(val[i], true) - } - return str + ']' - } else if (toStr === '[object Object]') { - // only object is left - keys = objKeys(val).sort() - max = keys.length - str = '' - i = 0 - while (i < max) { - key = keys[i] - propVal = cryptoStringifier(val[key], false) - if (propVal !== undefined) { - if (str) { - str += ',' - } - str += JSON.stringify(key) + ':' + propVal - } - i++ - } - return '{' + str + '}' - } else { - return JSON.stringify(val) - } - } - case 'function': - case 'undefined': - return isArrayProp ? 
null : undefined - case 'string': - return JSON.stringify(val) - case 'bigint': - return JSON.stringify(val.toString(16)) - default: - return isFinite(val) ? val : null - } - /* eslint-enable security/detect-object-injection */ -} - -export function cryptoStringify(val: unknown, isArrayProp = false): string { - const returnVal = cryptoStringifier(val, isArrayProp) - if (returnVal !== undefined) { - return '' + returnVal - } - return '' -}
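
A minimal TypeScript sketch of the migration pattern this patch applies, assuming only the Utils export of @shardus/types exactly as imported in the changed files. The Fastify hook shapes mirror the additions to src/server.ts; the sample payload, variable names, and the error cast are illustrative assumptions, not part of the patch.

import Fastify from 'fastify'
import { Utils as StringUtils } from '@shardus/types'

// One-for-one replacement of the stdlib calls removed throughout this patch.
const payload = { counter: 42, marker: 'abc' }
const body = StringUtils.safeStringify(payload)   // was JSON.stringify(payload)
const parsed = StringUtils.safeJsonParse(body)    // was JSON.parse(body)
console.log(parsed)

// Fastify wiring, mirroring the hooks added in src/server.ts, so that request
// bodies and reply payloads go through the same serializer pair (the
// "add parsing/stringify for bigint in fastify" commit relies on this).
const server = Fastify()
server.addContentTypeParser('application/json', { parseAs: 'string' }, (_req, rawBody, done) => {
  try {
    done(null, StringUtils.safeJsonParse(rawBody as string))
  } catch (err) {
    ;(err as { statusCode?: number }).statusCode = 400
    done(err as Error, undefined)
  }
})
server.setReplySerializer((replyPayload) => StringUtils.safeStringify(replyPayload))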