diff --git a/nimbus/db/aristo/aristo_blobify.nim b/nimbus/db/aristo/aristo_blobify.nim
index 493a11497b..7245a6cebc 100644
--- a/nimbus/db/aristo/aristo_blobify.nim
+++ b/nimbus/db/aristo/aristo_blobify.nim
@@ -203,12 +203,12 @@ proc blobifyTo*(vtx: VertexRef; data: var Blob): Result[void,AristoError] =
         if vtx.ePfx.len > 0:
           vtx.ePfx.toHexPrefix(isleaf = false)
         else:
-          @[]
+          default(HexPrefixBuf)
       psLen = pSegm.len.byte
     if 33 < psLen:
       return err(BlobifyExtPathOverflow)
 
-    data &= pSegm
+    data &= pSegm.data()
     data &= lens.toBytesBE
     data &= [0x80u8 or psLen]
 
@@ -219,16 +219,16 @@ proc blobifyTo*(vtx: VertexRef; data: var Blob): Result[void,AristoError] =
     if psLen == 0 or 33 < psLen:
       return err(BlobifyLeafPathOverflow)
     vtx.lData.blobifyTo(data)
-    data &= pSegm
+    data &= pSegm.data()
     data &= [0xC0u8 or psLen]
 
   ok()
 
-proc blobify*(vtx: VertexRef): Result[Blob, AristoError] =
+proc blobify*(vtx: VertexRef): Blob =
   ## Variant of `blobify()`
-  var data: Blob
-  ? vtx.blobifyTo data
-  ok(move(data))
+  result = newSeqOfCap[byte](128)
+  if vtx.blobifyTo(result).isErr:
+    result.setLen(0) # blobify only fails on invalid verticies
 
 proc blobifyTo*(lSst: SavedState; data: var Blob): Result[void,AristoError] =
   ## Serialise a last saved state record
@@ -246,45 +246,48 @@ proc blobify*(lSst: SavedState): Result[Blob,AristoError] =
 # -------------
 proc deblobify(
     data: openArray[byte];
-    T: type LeafPayload;
-  ): Result[LeafPayload,AristoError] =
+    pyl: var LeafPayload;
+  ): Result[void,AristoError] =
   if data.len == 0:
-    return ok LeafPayload(pType: RawData)
+    pyl = LeafPayload(pType: RawData)
+    return ok()
 
   let mask = data[^1]
   if (mask and 0x10) > 0: # unstructured payload
-    return ok LeafPayload(pType: RawData, rawBlob: data[0 .. ^2])
+    pyl = LeafPayload(pType: RawData, rawBlob: data[0 .. ^2])
+    return ok()
 
   if (mask and 0x20) > 0: # Slot storage data
-    return ok LeafPayload(
+    pyl = LeafPayload(
       pType: StoData,
       stoData: ?deblobify(data.toOpenArray(0, data.len - 2), UInt256))
+    return ok()
 
+  pyl = LeafPayload(pType: AccountData)
   var
-    pAcc = LeafPayload(pType: AccountData)
     start = 0
     lens = uint16.fromBytesBE(data.toOpenArray(data.len - 3, data.len - 2))
 
   if (mask and 0x01) > 0:
     let len = lens and 0b111
-    pAcc.account.nonce = ? load64(data, start, int(len + 1))
+    pyl.account.nonce = ? load64(data, start, int(len + 1))
 
   if (mask and 0x02) > 0:
     let len = (lens shr 3) and 0b11111
-    pAcc.account.balance = ? load256(data, start, int(len + 1))
+    pyl.account.balance = ? load256(data, start, int(len + 1))
 
   if (mask and 0x04) > 0:
     let len = (lens shr 8) and 0b111
-    pAcc.stoID = (true, VertexID(? load64(data, start, int(len + 1))))
+    pyl.stoID = (true, VertexID(? load64(data, start, int(len + 1))))
 
   if (mask and 0x08) > 0:
     if data.len() < start + 32:
       return err(DeblobCodeLenUnsupported)
-    discard pAcc.account.codeHash.data.copyFrom(data.toOpenArray(start, start + 31))
+    discard pyl.account.codeHash.data.copyFrom(data.toOpenArray(start, start + 31))
   else:
-    pAcc.account.codeHash = EMPTY_CODE_HASH
+    pyl.account.codeHash = EMPTY_CODE_HASH
 
-  ok(pAcc)
+  ok()
 
 proc deblobify*(
     record: openArray[byte];
@@ -336,11 +339,12 @@ proc deblobify*(
         NibblesBuf.fromHexPrefix record.toOpenArray(pLen, rLen-1)
     if not isLeaf:
       return err(DeblobLeafGotExtPrefix)
-    let pyl = ? record.toOpenArray(0, pLen - 1).deblobify(LeafPayload)
-    VertexRef(
+    let vtx = VertexRef(
       vType: Leaf,
-      lPfx: pathSegment,
-      lData: pyl)
+      lPfx: pathSegment)
+
+    ? record.toOpenArray(0, pLen - 1).deblobify(vtx.lData)
+    vtx
   else:
     return err(DeblobUnknown)
 
diff --git a/nimbus/db/aristo/aristo_compute.nim b/nimbus/db/aristo/aristo_compute.nim
index e3c26d3f64..46a02f1c96 100644
--- a/nimbus/db/aristo/aristo_compute.nim
+++ b/nimbus/db/aristo/aristo_compute.nim
@@ -68,7 +68,7 @@ proc computeKeyImpl(
   case vtx.vType:
   of Leaf:
     writer.startList(2)
-    writer.append(vtx.lPfx.toHexPrefix(isLeaf = true))
+    writer.append(vtx.lPfx.toHexPrefix(isLeaf = true).data())
 
     case vtx.lData.pType
     of AccountData:
@@ -111,7 +111,7 @@ proc computeKeyImpl(
       writeBranch(bwriter)
 
       writer.startList(2)
-      writer.append(vtx.ePfx.toHexPrefix(isleaf = false))
+      writer.append(vtx.ePfx.toHexPrefix(isleaf = false).data())
       writer.append(bwriter.finish().digestTo(HashKey))
     else:
       writeBranch(writer)
diff --git a/nimbus/db/aristo/aristo_desc/desc_nibbles.nim b/nimbus/db/aristo/aristo_desc/desc_nibbles.nim
index 58ecc97088..48f67633dd 100644
--- a/nimbus/db/aristo/aristo_desc/desc_nibbles.nim
+++ b/nimbus/db/aristo/aristo_desc/desc_nibbles.nim
@@ -8,15 +8,20 @@
 # at your option. This file may not be copied, modified, or distributed
 # except according to those terms.
 
-import stew/arrayops
+import stew/[arraybuf, arrayops]
 
-type NibblesBuf* = object
-  ## Allocation-free type for storing up to 64 4-bit nibbles, as seen in the
-  ## Ethereum MPT
-  bytes: array[32, byte]
-  ibegin, iend: int8
-    # Where valid nibbles can be found - we use indices here to avoid copies
-    # wen slicing - iend not inclusive
+export arraybuf
+
+type
+  NibblesBuf* = object
+    ## Allocation-free type for storing up to 64 4-bit nibbles, as seen in the
+    ## Ethereum MPT
+    bytes: array[32, byte]
+    ibegin, iend: int8
+      # Where valid nibbles can be found - we use indices here to avoid copies
+      # wen slicing - iend not inclusive
+
+  HexPrefixBuf* = ArrayBuf[33, byte]
 
 func high*(T: type NibblesBuf): int =
   63
@@ -61,7 +66,7 @@ func `$`*(r: NibblesBuf): string =
     const chars = "0123456789abcdef"
     result.add chars[r[i]]
 
-func slice*(r: NibblesBuf, ibegin: int, iend = -1): NibblesBuf =
+func slice*(r: NibblesBuf, ibegin: int, iend = -1): NibblesBuf {.noinit.} =
   result.bytes = r.bytes
   result.ibegin = r.ibegin + ibegin.int8
   let e =
@@ -75,7 +80,7 @@ func slice*(r: NibblesBuf, ibegin: int, iend = -1): NibblesBuf =
 template writeFirstByte(nibbleCountExpr) {.dirty.} =
   let nibbleCount = nibbleCountExpr
   var oddnessFlag = (nibbleCount and 1) != 0
-  newSeq(result, (nibbleCount div 2) + 1)
+  result.setLen((nibbleCount div 2) + 1)
   result[0] = byte((int(isLeaf) * 2 + int(oddnessFlag)) shl 4)
   var writeHead = 0
 
@@ -89,11 +94,11 @@ template writeNibbles(r) {.dirty.} =
       result[writeHead] = nextNibble shl 4
     oddnessFlag = not oddnessFlag
 
-func toHexPrefix*(r: NibblesBuf, isLeaf = false): seq[byte] =
+func toHexPrefix*(r: NibblesBuf, isLeaf = false): HexPrefixBuf =
   writeFirstByte(r.len)
   writeNibbles(r)
 
-func toHexPrefix*(r1, r2: NibblesBuf, isLeaf = false): seq[byte] =
+func toHexPrefix*(r1, r2: NibblesBuf, isLeaf = false): HexPrefixBuf =
   writeFirstByte(r1.len + r2.len)
   writeNibbles(r1)
   writeNibbles(r2)
@@ -131,7 +136,7 @@ func fromHexPrefix*(
   else:
     result.isLeaf = false
 
-func `&`*(a, b: NibblesBuf): NibblesBuf =
+func `&`*(a, b: NibblesBuf): NibblesBuf {.noinit.} =
   for i in 0 ..< a.len:
     result[i] = a[i]
 
diff --git a/nimbus/db/aristo/aristo_init/memory_db.nim b/nimbus/db/aristo/aristo_init/memory_db.nim
index a90773ceb3..2e7c95a7a3 100644
--- a/nimbus/db/aristo/aristo_init/memory_db.nim
+++ b/nimbus/db/aristo/aristo_init/memory_db.nim
@@ -133,14 +133,7 @@ proc putVtxFn(db: MemBackendRef): PutVtxFn =
       let hdl = hdl.getSession db
       if hdl.error.isNil:
         if vtx.isValid:
-          let rc = vtx.blobify()
-          if rc.isErr:
-            hdl.error = TypedPutHdlErrRef(
-              pfx: VtxPfx,
-              vid: rvid.vid,
-              code: rc.error)
-            return
-          hdl.sTab[rvid] = rc.value
+          hdl.sTab[rvid] = vtx.blobify()
         else:
           hdl.sTab[rvid] = EmptyBlob
 
diff --git a/nimbus/db/aristo/aristo_init/rocks_db.nim b/nimbus/db/aristo/aristo_init/rocks_db.nim
index 702d359f1e..15c51c054f 100644
--- a/nimbus/db/aristo/aristo_init/rocks_db.nim
+++ b/nimbus/db/aristo/aristo_init/rocks_db.nim
@@ -312,10 +312,8 @@ iterator walkVtx*(
     be: RdbBackendRef;
       ): tuple[evid: RootedVertexID, vtx: VertexRef] =
   ## Variant of `walk()` iteration over the vertex sub-table.
-  for (rvid, data) in be.rdb.walkVtx:
-    let rc = data.deblobify VertexRef
-    if rc.isOk:
-      yield (rvid, rc.value)
+  for (rvid, vtx) in be.rdb.walkVtx:
+    yield (rvid, vtx)
 
 iterator walkKey*(
     be: RdbBackendRef;
diff --git a/nimbus/db/aristo/aristo_init/rocks_db/rdb_get.nim b/nimbus/db/aristo/aristo_init/rocks_db/rdb_get.nim
index b36a279e2e..f3fad1f547 100644
--- a/nimbus/db/aristo/aristo_init/rocks_db/rdb_get.nim
+++ b/nimbus/db/aristo/aristo_init/rocks_db/rdb_get.nim
@@ -63,10 +63,10 @@ proc getKey*(
     return ok(move(rc.value))
 
   # Otherwise fetch from backend database
-  var res: Result[HashKey,(AristoError,string)]
+  # A threadvar is used to avoid allocating an environment for onData
+  var res{.threadvar.}: Opt[HashKey]
   let onData = proc(data: openArray[byte]) =
-    res = HashKey.fromBytes(data).mapErr(proc(): auto =
-      (RdbHashKeyExpected,""))
+    res = HashKey.fromBytes(data)
 
   let gotData = rdb.keyCol.get(rvid.blobify().data(), onData).valueOr:
     const errSym = RdbBeDriverGetKeyError
@@ -76,9 +76,9 @@ proc getKey*(
 
   # Correct result if needed
   if not gotData:
-    res = ok(VOID_HASH_KEY)
+    res.ok(VOID_HASH_KEY)
   elif res.isErr():
-    return res # Parsing failed
+    return err((RdbHashKeyExpected,"")) # Parsing failed
 
   # Update cache and return
   ok rdb.rdKeyLru.lruAppend(rvid.vid, res.value(), RdKeyLruMaxSize)
@@ -93,10 +93,10 @@ proc getVtx*(
     return ok(move(rc.value))
 
   # Otherwise fetch from backend database
-  var res: Result[VertexRef,(AristoError,string)]
+  # A threadvar is used to avoid allocating an environment for onData
+  var res {.threadvar.}: Result[VertexRef,AristoError]
   let onData = proc(data: openArray[byte]) =
-    res = data.deblobify(VertexRef).mapErr(proc(error: AristoError): auto =
-      (error,""))
+    res = data.deblobify(VertexRef)
 
   let gotData = rdb.vtxCol.get(rvid.blobify().data(), onData).valueOr:
     const errSym = RdbBeDriverGetVtxError
@@ -105,9 +105,9 @@ proc getVtx*(
     return err((errSym,error))
 
   if not gotData:
-    res = ok(VertexRef(nil))
+    res.ok(VertexRef(nil))
   elif res.isErr():
-    return err((res.error(), "Parsing failed")) # Parsing failed
+    return err((res.error(), "Parsing failed")) # Parsing failed
 
   # Update cache and return
   ok rdb.rdVtxLru.lruAppend(rvid.vid, res.value(), RdVtxLruMaxSize)
diff --git a/nimbus/db/aristo/aristo_init/rocks_db/rdb_put.nim b/nimbus/db/aristo/aristo_init/rocks_db/rdb_put.nim
index 640097b9eb..f303191640 100644
--- a/nimbus/db/aristo/aristo_init/rocks_db/rdb_put.nim
+++ b/nimbus/db/aristo/aristo_init/rocks_db/rdb_put.nim
@@ -122,12 +122,7 @@ proc putVtx*(
       ): Result[void,(VertexID,AristoError,string)] =
   let dsc = rdb.session
   if vtx.isValid:
-    let rc = vtx.blobify()
-    if rc.isErr:
-      # Caller must `rollback()` which will flush the `rdVtxLru` cache
-      return err((rvid.vid,rc.error,""))
-
-    dsc.put(rvid.blobify().data(), rc.value, rdb.vtxCol.handle()).isOkOr:
+    dsc.put(rvid.blobify().data(), vtx.blobify(), rdb.vtxCol.handle()).isOkOr:
       # Caller must `rollback()` which will flush the `rdVtxLru` cache
       const errSym = RdbBeDriverPutVtxError
       when extraTraceMessages:
diff --git a/nimbus/db/aristo/aristo_init/rocks_db/rdb_walk.nim b/nimbus/db/aristo/aristo_init/rocks_db/rdb_walk.nim
index 637c6272d7..ab57dd9a25 100644
--- a/nimbus/db/aristo/aristo_init/rocks_db/rdb_walk.nim
+++ b/nimbus/db/aristo/aristo_init/rocks_db/rdb_walk.nim
@@ -72,8 +72,8 @@ iterator walkKey*(rdb: RdbInst): tuple[rvid: RootedVertexID, data: Blob] =
 
         yield (rvid, val)
 
-iterator walkVtx*(rdb: RdbInst): tuple[rvid: RootedVertexID, data: Blob] =
-  ## Walk over key-value pairs of the hash key column of the database.
+iterator walkVtx*(rdb: RdbInst): tuple[rvid: RootedVertexID, data: VertexRef] =
+  ## Walk over key-value pairs of the vertex column of the database.
   ##
   ## Non-decodable entries are are ignored.
   ##
@@ -84,12 +84,32 @@ iterator walkVtx*(rdb: RdbInst): tuple[rvid: RootedVertexID, data: Blob] =
       break walkBody
     defer: rit.close()
 
-    for (key,val) in rit.pairs:
-      if val.len != 0:
-        let rvid = key.deblobify(RootedVertexID).valueOr:
-          continue
-
-        yield (rvid, val)
+    rit.seekToFirst()
+    var key: RootedVertexID
+    var value: VertexRef
+    while rit.isValid():
+      var valid = true
+      rit.key(
+        proc(data: openArray[byte]) =
+          key = deblobify(data, RootedVertexID).valueOr:
+            valid = false
+            default(RootedVertexID)
+      )
+      if not valid:
+        continue
+
+      rit.value(
+        proc(data: openArray[byte]) =
+          value = deblobify(data, VertexRef).valueOr:
+            valid = false
+            default(VertexRef)
+      )
+      if not valid:
+        continue
+
+      rit.next()
+      yield (key, value)
+    rit.close()
 
 # ------------------------------------------------------------------------------
 # End
diff --git a/nimbus/db/aristo/aristo_serialise.nim b/nimbus/db/aristo/aristo_serialise.nim
index 77b1b9a9c5..13f2810f1f 100644
--- a/nimbus/db/aristo/aristo_serialise.nim
+++ b/nimbus/db/aristo/aristo_serialise.nim
@@ -86,7 +86,7 @@ proc to*(node: NodeRef; T: type seq[Blob]): T =
 
       var wrx = initRlpWriter()
       wrx.startList(2)
-      wrx.append node.ePfx.toHexPrefix(isleaf = false)
+      wrx.append node.ePfx.toHexPrefix(isleaf = false).data()
       wrx.append brHash
 
       result.add wrx.finish()
@@ -104,7 +104,7 @@ proc to*(node: NodeRef; T: type seq[Blob]): T =
 
     var wr = initRlpWriter()
     wr.startList(2)
-    wr.append node.lPfx.toHexPrefix(isleaf = true)
+    wr.append node.lPfx.toHexPrefix(isleaf = true).data()
     wr.append node.lData.serialise(getKey0).value
 
     result.add (wr.finish())
@@ -127,7 +127,7 @@ proc digestTo*(node: NodeRef; T: type HashKey): T =
     let brHash = wr.finish().digestTo(HashKey)
     wr = initRlpWriter()
     wr.startList(2)
-    wr.append node.ePfx.toHexPrefix(isleaf = false)
+    wr.append node.ePfx.toHexPrefix(isleaf = false).data()
     wr.append brHash
 
   of Leaf:
@@ -138,7 +138,7 @@ proc digestTo*(node: NodeRef; T: type HashKey): T =
       ok(node.key[0]) # always succeeds
 
     wr.startList(2)
-    wr.append node.lPfx.toHexPrefix(isleaf = true)
+    wr.append node.lPfx.toHexPrefix(isleaf = true).data()
     wr.append node.lData.serialise(getKey0).value
 
   wr.finish().digestTo(HashKey)
diff --git a/tests/test_aristo/test_blobify.nim b/tests/test_aristo/test_blobify.nim
index b94ee8dfa1..03d39b1c94 100644
--- a/tests/test_aristo/test_blobify.nim
+++ b/tests/test_aristo/test_blobify.nim
@@ -65,8 +65,8 @@ suite "Aristo blobify":
       )
 
     check:
-      deblobify(blobify(leafRawData)[], VertexRef)[] == leafRawData
-      deblobify(blobify(leafAccount)[], VertexRef)[] == leafAccount
-      deblobify(blobify(leafStoData)[], VertexRef)[] == leafStoData
-      deblobify(blobify(branch)[], VertexRef)[] == branch
-      deblobify(blobify(extension)[], VertexRef)[] == extension
+      deblobify(blobify(leafRawData), VertexRef)[] == leafRawData
+      deblobify(blobify(leafAccount), VertexRef)[] == leafAccount
+      deblobify(blobify(leafStoData), VertexRef)[] == leafStoData
+      deblobify(blobify(branch), VertexRef)[] == branch
+      deblobify(blobify(extension), VertexRef)[] == extension
diff --git a/tests/test_aristo/test_portal_proof.nim b/tests/test_aristo/test_portal_proof.nim
index 4d6b5db70a..8af4e39b0a 100644
--- a/tests/test_aristo/test_portal_proof.nim
+++ b/tests/test_aristo/test_portal_proof.nim
@@ -115,7 +115,7 @@ func asExtension(b: Blob; path: Hash256): Blob =
     var wr = initRlpWriter()
 
     wr.startList(2)
-    wr.append NibblesBuf.fromBytes(@[nibble]).slice(1).toHexPrefix(isleaf=false)
+    wr.append NibblesBuf.fromBytes(@[nibble]).slice(1).toHexPrefix(isleaf=false).data()
     wr.append node.listElem(nibble.int).toBytes
     wr.finish()
 
diff --git a/vendor/nim-stew b/vendor/nim-stew
index 54cc67cbb8..fc09b2e023 160000
--- a/vendor/nim-stew
+++ b/vendor/nim-stew
@@ -1 +1 @@
-Subproject commit 54cc67cbb83f61b6e3168b09701758c5b805120a
+Subproject commit fc09b2e023ab2d73e425f7d15cf94871c7867868
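
Not part of the patch, just an illustrative Nim sketch of the reworked API: `blobify` now returns a `Blob` directly (empty only for invalid vertices) and `deblobify` is called as in the updated test above, so a round-trip check looks like the following. The field values are made up; the constructors and the `deblobify(blobify(...), VertexRef)[]` shape mirror tests/test_aristo/test_blobify.nim.

    # Sketch only: build a leaf vertex, serialise it, decode it and compare.
    let leaf = VertexRef(
      vType: Leaf,
      lPfx: NibblesBuf.fromBytes(@[byte 1, 2, 3]),
      lData: LeafPayload(pType: RawData, rawBlob: @[byte 0xAA, 0xBB]))
    doAssert deblobify(blobify(leaf), VertexRef)[] == leaf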