diff --git a/internal/block/block.go b/internal/block/block.go
index 94f7eba..4e6bacb 100644
--- a/internal/block/block.go
+++ b/internal/block/block.go
@@ -9,8 +9,8 @@ type Block struct {
 // Extrinsic represents the block extrinsic data
 type Extrinsic struct {
     ET TicketExtrinsic
-    ED DisputeExtrinsic
     EP PreimageExtrinsic
-    EA AssurancesExtrinsic
     EG GuaranteesExtrinsic
+    EA AssurancesExtrinsic
+    ED DisputeExtrinsic
 }
diff --git a/internal/block/block_test.go b/internal/block/block_test.go
index d667206..06aebfe 100644
--- a/internal/block/block_test.go
+++ b/internal/block/block_test.go
@@ -3,15 +3,15 @@ package block
 import (
     "crypto/ed25519"
     "crypto/rand"
-    "github.com/eigerco/strawberry/pkg/serialization/codec/jam"
     "testing"

-    "github.com/eigerco/strawberry/internal/common"
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+
+    "github.com/eigerco/strawberry/internal/common"
     "github.com/eigerco/strawberry/internal/crypto"
     "github.com/eigerco/strawberry/internal/testutils"
-    "github.com/stretchr/testify/assert"
-    "github.com/stretchr/testify/require"
+    "github.com/eigerco/strawberry/pkg/serialization/codec/jam"
 )

 func Test_BlockEncodeDecode(t *testing.T) {
@@ -156,6 +156,7 @@ func Test_BlockEncodeDecode(t *testing.T) {
                     Output: WorkResultOutputOrError{CodeTooLarge},
                 },
             },
+            SegmentRootLookup: make(map[crypto.Hash]crypto.Hash),
         },
         Credentials: []CredentialSignature{
             {
diff --git a/internal/block/guarantee.go b/internal/block/guarantee.go
index 1c679f4..a623aa7 100644
--- a/internal/block/guarantee.go
+++ b/internal/block/guarantee.go
@@ -26,18 +26,16 @@ type CredentialSignature struct {
     Signature [crypto.Ed25519SignatureSize]byte // The Ed25519 signature
 }

-// WorkReport represents a work report in the JAM state (equation 118 v0.4.5)
+// WorkReport represents a work report in the JAM state (equation 11.2 v0.5.0)
 // TODO: The total serialized size of a work-report may be no greater than MaxWorkPackageSizeBytes.
 type WorkReport struct {
-    WorkPackageSpecification WorkPackageSpecification // Work-package specification (s)
-    RefinementContext        RefinementContext        // Refinement context (x)
-    CoreIndex                uint16                   // Core index (c) - Max value: TotalNumberOfCores
-    AuthorizerHash           crypto.Hash              // HeaderHash of the authorizer (a)
-    Output                   []byte                   // Output of the work report (o)
-    WorkResults              []WorkResult             // Results of the evaluation of each of the items in the work-package (r) - Min value: MinWorkPackageResultsSize. Max value: MaxWorkPackageResultsSize.
-
-    // TODO ignore this field for now as the test vectors still do not include it
-    SegmentRootLookup map[crypto.Hash]crypto.Hash `json:"-" jam:"-"` // A segment-root lookup dictionary (l ∈ D⟨H → H⟩)
+    WorkPackageSpecification WorkPackageSpecification    // Work-package specification (s)
+    RefinementContext        RefinementContext           // Refinement context (x)
+    CoreIndex                uint16                      // Core index (c) - Max value: TotalNumberOfCores
+    AuthorizerHash           crypto.Hash                 // HeaderHash of the authorizer (a)
+    Output                   []byte                      // Output of the work report (o)
+    SegmentRootLookup        map[crypto.Hash]crypto.Hash // A segment-root lookup dictionary (l ∈ D⟨H → H⟩)
+    WorkResults              []WorkResult                // Results of the evaluation of each of the items in the work-package (r) - Min value: MinWorkPackageResultsSize. Max value: MaxWorkPackageResultsSize.
 }

 type WorkPackageSpecification struct {
@@ -45,13 +43,14 @@
     AuditableWorkBundleLength uint32      // Length of the auditable work bundle (l)
     ErasureRoot               crypto.Hash // Erasure root (u) - is the root of a binary Merkle tree which functions as a commitment to all data required for the auditing of the report and for use by later workpackages should they need to retrieve any data yielded. It is thus used by assurers to verify the correctness of data they have been sent by guarantors, and it is later verified as correct by auditors.
     SegmentRoot               crypto.Hash // Segment root (e) - root of a constant-depth, left-biased and zero-hash-padded binary Merkle tree committing to the hashes of each of the exported segments of each work-item. These are used by guarantors to verify the correctness of any reconstructed segments they are called upon to import for evaluation of some later work-package.
+    SegmentCount              uint16      // Segment count (n)
 }

 // RefinementContext describes the context of the chain at the point that the report’s corresponding work-package was evaluated.
 type RefinementContext struct {
     Anchor       RefinementContextAnchor       // Historical block anchor
     LookupAnchor RefinementContextLookupAnchor // Historical block anchor
-    PrerequisiteWorkPackage *crypto.Hash  // Prerequisite work package (p) (optional)
+    PrerequisiteWorkPackage []crypto.Hash // Prerequisite work package (p) (optional)
 }

 type RefinementContextAnchor struct {
diff --git a/internal/block/header.go b/internal/block/header.go
index 9f4c7e1..dd65932 100644
--- a/internal/block/header.go
+++ b/internal/block/header.go
@@ -4,6 +4,7 @@
 import (
     "crypto/ed25519"
     "errors"
     "fmt"
+
     "github.com/eigerco/strawberry/pkg/db"
     "github.com/eigerco/strawberry/pkg/db/pebble"
@@ -30,8 +31,9 @@ type Header struct {
 // EpochMarker consists of epoch randomness and a sequence of
 // Bandersnatch keys defining the Bandersnatch validator keys (kb) beginning in the next epoch.
 type EpochMarker struct {
-    Entropy crypto.Hash
-    Keys    [common.NumberOfValidators]crypto.BandersnatchPublicKey
+    Entropy        crypto.Hash
+    TicketsEntropy crypto.Hash
+    Keys           [common.NumberOfValidators]crypto.BandersnatchPublicKey
 }

 type WinningTicketMarker [jamtime.TimeslotsPerEpoch]Ticket
diff --git a/internal/state/block_seal_test.go b/internal/state/block_seal_test.go
index 77a3fdf..c6576a9 100644
--- a/internal/state/block_seal_test.go
+++ b/internal/state/block_seal_test.go
@@ -3,14 +3,15 @@ package state
 import (
     "testing"

+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+
     "github.com/eigerco/strawberry/internal/block"
     "github.com/eigerco/strawberry/internal/crypto"
     "github.com/eigerco/strawberry/internal/safrole"
     "github.com/eigerco/strawberry/internal/testutils"
     "github.com/eigerco/strawberry/internal/validator"
     "github.com/eigerco/strawberry/pkg/serialization/codec/jam"
-    "github.com/stretchr/testify/assert"
-    "github.com/stretchr/testify/require"
 )

 func TestEncodeUnsealedHeader(t *testing.T) {
diff --git a/internal/state/merkle/helpers_test.go b/internal/state/merkle/helpers_test.go
index acfa21d..dc8137c 100644
--- a/internal/state/merkle/helpers_test.go
+++ b/internal/state/merkle/helpers_test.go
@@ -4,9 +4,12 @@ import (
     "crypto/ed25519"
     "errors"
     "fmt"
-    "github.com/eigerco/strawberry/internal/state"
     "testing"

+    "github.com/eigerco/strawberry/internal/state"
+
+    "github.com/stretchr/testify/require"
+
     "github.com/eigerco/strawberry/internal/block"
     "github.com/eigerco/strawberry/internal/common"
     "github.com/eigerco/strawberry/internal/crypto"
@@ -15,7 +18,6 @@ import (
     "github.com/eigerco/strawberry/internal/testutils"
     "github.com/eigerco/strawberry/internal/validator"
     "github.com/eigerco/strawberry/pkg/serialization/codec/jam"
-    "github.com/stretchr/testify/require"
 )

 func RandomValidatorsData(t *testing.T) safrole.ValidatorsData {
@@ -116,10 +118,11 @@ func RandomCoreAssignments(t *testing.T) state.CoreAssignments {
                 LookupAnchor:            block.RefinementContextLookupAnchor{HeaderHash: testutils.RandomHash(t), Timeslot: testutils.RandomTimeslot()},
                 PrerequisiteWorkPackage: nil,
             },
-            CoreIndex:      uint16(i),
-            AuthorizerHash: testutils.RandomHash(t),
-            Output:         []byte("output"),
-            WorkResults:    []block.WorkResult{RandomWorkResult(t)},
+            CoreIndex:         uint16(i),
+            AuthorizerHash:    testutils.RandomHash(t),
+            Output:            []byte("output"),
+            SegmentRootLookup: make(map[crypto.Hash]crypto.Hash),
+            WorkResults:       []block.WorkResult{RandomWorkResult(t)},
         },
         Time: testutils.RandomTimeslot(),
     }
@@ -182,10 +185,11 @@ func RandomWorkReport(t *testing.T) block.WorkReport {
                 Timeslot:   testutils.RandomTimeslot(),
             },
         },
-        CoreIndex:      testutils.RandomUint16(),
-        AuthorizerHash: testutils.RandomHash(t),
-        Output:         []byte("random output"),
-        WorkResults:    []block.WorkResult{RandomWorkResult(t)},
+        CoreIndex:         testutils.RandomUint16(),
+        AuthorizerHash:    testutils.RandomHash(t),
+        Output:            []byte("random output"),
+        SegmentRootLookup: make(map[crypto.Hash]crypto.Hash),
+        WorkResults:       []block.WorkResult{RandomWorkResult(t)},
     }
 }
diff --git a/internal/state/merkle/serialization_test.go b/internal/state/merkle/serialization_test.go
index 8b81f7c..d399033 100644
--- a/internal/state/merkle/serialization_test.go
+++ b/internal/state/merkle/serialization_test.go
@@ -1,12 +1,14 @@
 package state

 import (
+    "testing"
+
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+
     "github.com/eigerco/strawberry/internal/crypto"
     "github.com/eigerco/strawberry/internal/safrole"
     "github.com/eigerco/strawberry/pkg/serialization/codec/jam"
-    "github.com/stretchr/testify/assert"
-    "github.com/stretchr/testify/require"
-    "testing"
 )

 func TestSerializeState(t *testing.T) {
diff --git a/internal/statetransition/state_transition.go b/internal/statetransition/state_transition.go
index 0b0abeb..de2d087 100644
--- a/internal/statetransition/state_transition.go
+++ b/internal/statetransition/state_transition.go
@@ -3,11 +3,11 @@ package statetransition
 import (
     "bytes"
     "crypto/ed25519"
+    "crypto/sha256"
     "errors"
     "fmt"
     "log"
     "maps"
-    "reflect"
     "slices"
     "sort"
     "sync"
@@ -871,10 +871,32 @@ func calculateNewCoreAssignments(
     return newAssignments
 }

-func validateExtrinsicGuarantees(header block.Header, currentState *state.State, guarantees block.GuaranteesExtrinsic, ancestorStore *block.AncestorStore) error {
+// generateRefinementContextID serializes the RefinementContext and returns its SHA-256 hash as a hex string.
+func generateRefinementContextID(context block.RefinementContext) (string, error) {
+    serialized, err := jam.Marshal(context)
+    if err != nil {
+        return "", fmt.Errorf("failed to serialize RefinementContext: %w", err)
+    }
+
+    hash := sha256.Sum256(serialized)

-    // let x ≡ {w_x S w ∈ w} , p ≡ {(w_s)h S w ∈ w} (145 v0.4.5)
-    contexts := make(map[block.RefinementContext]struct{})
+    // Convert the hash to a hex string
+    return fmt.Sprintf("%x", hash), nil
+}
+
+// computeWorkReportHash computes a SHA-256 hash of the WorkReport
+func computeWorkReportHash(workReport block.WorkReport) (crypto.Hash, error) {
+    serialized, err := jam.Marshal(workReport)
+    if err != nil {
+        return crypto.Hash{}, fmt.Errorf("failed to serialize WorkReport: %w", err)
+    }
+    hash := sha256.Sum256(serialized)
+
+    return hash, nil
+}
+
+func validateExtrinsicGuarantees(header block.Header, currentState *state.State, guarantees block.GuaranteesExtrinsic, ancestorStore *block.AncestorStore) error {
+    contexts := make(map[string]struct{})
     extrinsicWorkPackages := make(map[crypto.Hash]crypto.Hash)

     prerequisitePackageHashes := make(map[crypto.Hash]struct{})
@@ -892,9 +914,17 @@ func validateExtrinsicGuarantees(header block.Header, currentState *state.State,
     }

     for _, guarantee := range guarantees.Guarantees {
-        contexts[guarantee.WorkReport.RefinementContext] = struct{}{}
+        context := guarantee.WorkReport.RefinementContext
+
+        // Generate a unique ID for the context
+        contextID, err := generateRefinementContextID(context)
+        if err != nil {
+            return fmt.Errorf("failed to generate RefinementContextID: %w", err)
+        }
+
+        contexts[contextID] = struct{}{}
         extrinsicWorkPackages[guarantee.WorkReport.WorkPackageSpecification.WorkPackageHash] = guarantee.WorkReport.WorkPackageSpecification.SegmentRoot
-        // ∀w ∈ w ∶ [∑ r∈wr] (rg) ≤ GA ∧ ∀r ∈ wr ∶ rg ≥ δ[rs]g (eq. 144 v0.4.5)
+        // ∀w ∈ w ∶ [∑ r∈wr] (rg) ≤ GA ∧ ∀r ∈ wr ∶ rg ≥ δ[rs]g (eq. 11.29 v0.5.0)
         totalGas := uint64(0)
         for _, r := range guarantee.WorkReport.WorkResults {
             if r.GasPrioritizationRatio < currentState.Services[r.ServiceId].GasLimitForAccumulator {
@@ -909,45 +939,68 @@ func validateExtrinsicGuarantees(header block.Header, currentState *state.State,
         for key := range guarantee.WorkReport.SegmentRootLookup {
             prerequisitePackageHashes[key] = struct{}{}
         }
-        if guarantee.WorkReport.RefinementContext.PrerequisiteWorkPackage != nil {
-            prerequisitePackageHashes[*guarantee.WorkReport.RefinementContext.PrerequisiteWorkPackage] = struct{}{}
-            // let q = {(wx)p S q ∈ ϑ, w ∈ K(q)} (150 v0.4.5)
+        // Handle multiple prerequisite work packages
+        for _, prereqHash := range context.PrerequisiteWorkPackage {
+            prerequisitePackageHashes[prereqHash] = struct{}{}
+
+            // let q = {(wx)p S q ∈ ϑ, w ∈ K(q)} (eq. 11.35 v0.5.0)
             for _, workReportsAndDeps := range currentState.AccumulationQueue {
                 for _, wd := range workReportsAndDeps {
-                    if reflect.DeepEqual(wd.WorkReport, guarantee.WorkReport) { // TODO maybe use hash compare instead of reflect
-                        pastWorkPackages[*guarantee.WorkReport.RefinementContext.PrerequisiteWorkPackage] = struct{}{}
+                    // Compare the hashes
+                    wdHash, err := computeWorkReportHash(wd.WorkReport)
+                    if err != nil {
+                        return fmt.Errorf("failed to compute WorkReport hash: %w", err)
+                    }
+                    currentGuaranteeHash, err := computeWorkReportHash(guarantee.WorkReport)
+                    if err != nil {
+                        return fmt.Errorf("failed to compute current WorkReport hash: %w", err)
+                    }
+                    if wdHash == currentGuaranteeHash {
+                        pastWorkPackages[prereqHash] = struct{}{}
                     }
                 }
             }
         }

-        // let a = {((iw )x)p S i ∈ ρ, i ≠ ∅} (151 v0.4.5)
+        // let a = {((iw )x)p S i ∈ ρ, i ≠ ∅} (eq. 11.36 v0.5.0)
         for _, ca := range currentState.CoreAssignments {
-            if ca.WorkReport != nil && ca.WorkReport.RefinementContext.PrerequisiteWorkPackage != nil {
-                pastWorkPackages[*ca.WorkReport.RefinementContext.PrerequisiteWorkPackage] = struct{}{}
+            if ca.WorkReport != nil {
+                for _, prereqHash := range ca.WorkReport.RefinementContext.PrerequisiteWorkPackage {
+                    pastWorkPackages[prereqHash] = struct{}{}
+                }
             }
         }
     }

-    // |p| = |w| (146 v0.4.5)
+    // |p| = |w| (eq. 11.31 v0.5.0)
     if len(extrinsicWorkPackages) != len(guarantees.Guarantees) {
         return fmt.Errorf("cardinality of work-package hashes is not equal to the length of work-reports")
     }

-    for context := range contexts {
-        // ∀x ∈ x ∶ ∃y ∈ β ∶ x_a = y_h ∧ x_s = ys ∧ xb = HK (EM (yb)) (147 v0.4.5)
+    for _, guarantee := range guarantees.Guarantees {
+        context := guarantee.WorkReport.RefinementContext
+        contextID, err := generateRefinementContextID(context)
+        if err != nil {
+            return fmt.Errorf("failed to generate RefinementContextID: %w", err)
+        }
+
+        if _, exists := contexts[contextID]; !exists {
+            return fmt.Errorf("context ID not found in contexts map")
+        }
+
+        // ∀x ∈ x ∶ ∃y ∈ β ∶ x_a = y_h ∧ x_s = y_s ∧ x_b = HK (EM (y_b)) (eq. 11.32 v0.5.0)
         if !anchorBlockInRecentBlocks(context, currentState) {
             return fmt.Errorf("anchor block not present within recent blocks")
         }

-        // ∀x ∈ x ∶ xt ≥ Ht − L (148 v0.4.5)
+        // ∀x ∈ x ∶ xt ≥ Ht − L (eq. 11.33 v0.5.0)
         if context.LookupAnchor.Timeslot < header.TimeSlotIndex-state.MaxTimeslotsForPreimage {
-            return fmt.Errorf("lookup anchor block not withing the last %d timeslots", state.MaxTimeslotsForPreimage)
+            return fmt.Errorf("lookup anchor block (timeslot %d) not within the last %d timeslots (current timeslot: %d)", context.LookupAnchor.Timeslot, state.MaxTimeslotsForPreimage, header.TimeSlotIndex)
         }

-        // ∀x ∈ x ∶ ∃h ∈ A ∶ ht = xt ∧ H(h) = xl (149 v0.4.5)
-        _, err := ancestorStore.FindAncestor(func(ancestor block.Header) bool {
+        // ∀x ∈ x ∶ ∃h ∈ A ∶ ht = xt ∧ H(h) = xl (eq. 11.34 v0.5.0)
+        _, err = ancestorStore.FindAncestor(func(ancestor block.Header) bool {
             encodedHeader, err := jam.Marshal(ancestor)
             if err != nil {
                 return false
@@ -964,21 +1017,28 @@ func validateExtrinsicGuarantees(header block.Header, currentState *state.State,
     accHistoryPrerequisites := make(map[crypto.Hash]struct{})

     for _, hashSet := range currentState.AccumulationHistory {
-        maps.Copy(accHistoryPrerequisites, hashSet)
+        for h := range hashSet {
+            accHistoryPrerequisites[h] = struct{}{}
+        }
     }

-    // ∀p ∈ p, p ∉ [⋃ x∈β] K(x_p) ∪ [⋃ x∈ξ] x ∪ q ∪ a (152 v0.4.5)
+    // ∀p ∈ p, p ∉ [⋃ x∈β] K(x_p) ∪ [⋃ x∈ξ] x ∪ q ∪ a (eq. 11.37 v0.5.0)
     for p := range extrinsicWorkPackages {
         if _, ok := pastWorkPackages[p]; ok {
             return fmt.Errorf("report work-package is the work-package of some other report made in the past")
         }
     }

-    // p ∪ {x | x ∈ b_p, b ∈ β} (153,154 v0.4.5)
-    extrinsicAndRecentWorkPackages := maps.Clone(extrinsicWorkPackages)
-    maps.Copy(extrinsicAndRecentWorkPackages, recentBlockPrerequisites)
+    // p ∪ {x | x ∈ b_p, b ∈ β} (eq. 11.33, 11.39 v0.5.0)
+    extrinsicAndRecentWorkPackages := make(map[crypto.Hash]crypto.Hash)
+    for k, v := range extrinsicWorkPackages {
+        extrinsicAndRecentWorkPackages[k] = v
+    }
+    for k, v := range recentBlockPrerequisites {
+        extrinsicAndRecentWorkPackages[k] = v
+    }

-    // ∀w ∈ w, ∀p ∈ (wx)p ∪ K(wl) ∶ p ∈ p ∪ {x S x ∈ K(bp), b ∈ β} (153 v0.4.5)
+    // ∀w ∈ w, ∀p ∈ (wx)p ∪ K(wl) ∶ p ∈ p ∪ {x S x ∈ K(bp), b ∈ β} (eq. 11.38 v0.5.0)
     for p := range prerequisitePackageHashes {
         if _, ok := extrinsicWorkPackages[p]; !ok {
             return fmt.Errorf("prerequisite report work-package is neither in the extrinsic nor in recent history")
@@ -986,14 +1046,14 @@
     }

     for _, guarantee := range guarantees.Guarantees {
-        // ∀w ∈ w ∶ wl ⊆ p ∪ [⋃ b∈β] b_p (155 v0.4.5)
+        // ∀w ∈ w ∶ wl ⊆ p ∪ [⋃ b∈β] b_p (eq. 11.40 v0.5.0)
         for lookupKey, lookupValue := range guarantee.WorkReport.SegmentRootLookup {
             if extrinsicAndRecentWorkPackages[lookupKey] != lookupValue {
                 return fmt.Errorf("segment root not present in the present nor recent blocks")
             }
         }

-        // ∀w ∈ w, ∀r ∈ wr ∶ rc = δ[rs]c (156 v0.4.5)
+        // ∀w ∈ w, ∀r ∈ wr ∶ rc = δ[rs]c (eq. 11.41 v0.5.0)
         for _, workResult := range guarantee.WorkReport.WorkResults {
             if workResult.ServiceHashCode != currentState.Services[workResult.ServiceId].CodeHash {
                 return fmt.Errorf("work result code does not correspond with the service code")
@@ -1321,8 +1381,8 @@ func accumulationPriority(workReportAndDeps []state.WorkReportWithUnAccumulatedD
 // getWorkReportDependencies (167) D(w) ≡ (w, {(wx)p} ∪ K(wl))
 func getWorkReportDependencies(workReport block.WorkReport) state.WorkReportWithUnAccumulatedDependencies {
     deps := make(map[crypto.Hash]struct{})
-    if workReport.RefinementContext.PrerequisiteWorkPackage != nil {
-        deps[*workReport.RefinementContext.PrerequisiteWorkPackage] = struct{}{}
+    for _, prereqHash := range workReport.RefinementContext.PrerequisiteWorkPackage {
+        deps[prereqHash] = struct{}{}
     }
     for key := range workReport.SegmentRootLookup {
         deps[key] = struct{}{}
diff --git a/tests/integration/block_integration_test.go b/tests/integration/block_integration_test.go
index a59694a..807487e 100644
--- a/tests/integration/block_integration_test.go
+++ b/tests/integration/block_integration_test.go
@@ -24,7 +24,7 @@ var toWorkResultErrorMap = map[string]block.WorkResultError{
 }

 func TestDecodeBlockWithJamCodec(t *testing.T) {
-    b, err := os.ReadFile("vectors/block.bin")
+    b, err := os.ReadFile("vectors/codec/block.bin")
     require.NoError(t, err)

     var unmarshaled block.Block
@@ -40,6 +40,7 @@ func TestDecodeBlockWithJamCodec(t *testing.T) {
     require.Equal(t, expected.Header.Slot, unmarshaled.Header.TimeSlotIndex)

     require.Equal(t, expected.Header.EpochMark.Entropy, toHex(unmarshaled.Header.EpochMarker.Entropy))
+    require.Equal(t, expected.Header.EpochMark.TicketsEntropy, toHex(unmarshaled.Header.EpochMarker.TicketsEntropy))
     for i := range expected.Header.EpochMark.Validators {
         require.Equal(t, expected.Header.EpochMark.Validators[i], toHex(unmarshaled.Header.EpochMarker.Keys[i]))
     }
@@ -97,25 +98,27 @@ func TestDecodeBlockWithJamCodec(t *testing.T) {

     for i := range expected.Extrinsic.Guarantees {
         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.PackageSpec.Hash, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkPackageSpecification.WorkPackageHash))
-        require.Equal(t, expected.Extrinsic.Guarantees[i].Report.PackageSpec.Len, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkPackageSpecification.AuditableWorkBundleLength)
+        require.Equal(t, expected.Extrinsic.Guarantees[i].Report.PackageSpec.Length, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkPackageSpecification.AuditableWorkBundleLength)
         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.PackageSpec.ErasureRoot, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkPackageSpecification.ErasureRoot))
         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.PackageSpec.ExportsRoot, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkPackageSpecification.SegmentRoot))
+        require.Equal(t, expected.Extrinsic.Guarantees[i].Report.PackageSpec.ExportsCount, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkPackageSpecification.SegmentCount)

         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Context.Anchor, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.RefinementContext.Anchor.HeaderHash))
         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Context.StateRoot, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.RefinementContext.Anchor.PosteriorStateRoot))
         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Context.BeefyRoot, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.RefinementContext.Anchor.PosteriorBeefyRoot))
         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Context.LookupAnchor, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.RefinementContext.LookupAnchor.HeaderHash))
         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Context.LookupAnchorSlot, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.RefinementContext.LookupAnchor.Timeslot)
-        require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Context.Prerequisite, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.RefinementContext.PrerequisiteWorkPackage)
+        assertHashSlicesEqual(t, expected.Extrinsic.Guarantees[i].Report.Context.Prerequisites, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.RefinementContext.PrerequisiteWorkPackage)

         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.CoreIndex, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.CoreIndex)
         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.AuthorizerHash, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.AuthorizerHash))
         require.Equal(t, expected.Extrinsic.Guarantees[i].Report.AuthOutput, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.Output))

         for j := range expected.Extrinsic.Guarantees[i].Report.Results {
-            require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Results[j].Service, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkResults[j].ServiceId)
+            require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Results[j].ServiceId, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkResults[j].ServiceId)
+            require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Results[j].CodeHash, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkResults[j].ServiceHashCode))
             require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Results[j].PayloadHash, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkResults[j].PayloadHash))
-            require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Results[j].GasRatio, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkResults[j].GasPrioritizationRatio)
+            require.Equal(t, expected.Extrinsic.Guarantees[i].Report.Results[j].Gas, unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkResults[j].GasPrioritizationRatio)
             if expected.Extrinsic.Guarantees[i].Report.Results[j].Result.Ok != nil {
                 require.Equal(t, *expected.Extrinsic.Guarantees[i].Report.Results[j].Result.Ok, toHex(unmarshaled.Extrinsic.EG.Guarantees[i].WorkReport.WorkResults[j].Output.Inner))
             }
@@ -140,8 +143,19 @@ func toHex(data any) string {
     return fmt.Sprintf("0x%x", data)
 }

+func assertHashSlicesEqual(t *testing.T, expected []crypto.Hash, actual []crypto.Hash) {
+    if expected == nil {
+        expected = []crypto.Hash{}
+    }
+    if actual == nil {
+        actual = []crypto.Hash{}
+    }
+
+    require.Equal(t, expected, actual, "Hashes do not match")
+}
+
 func unmarsalExpectedBlock(t *testing.T) expectedBlock {
-    b, err := os.ReadFile("vectors/expected_block.json")
+    b, err := os.ReadFile("vectors/codec/expected_block.json")
     require.NoError(t, err)

     var unmarshaled expectedBlock
@@ -181,8 +195,9 @@ type expectedBlock struct {
         ExtrinsicHash string           `json:"extrinsic_hash"`
         Slot          jamtime.Timeslot `json:"slot"`
         EpochMark     struct {
-            Entropy    string   `json:"entropy"`
-            Validators []string `json:"validators"`
+            Entropy        string   `json:"entropy"`
+            TicketsEntropy string   `json:"tickets_entropy"`
+            Validators     []string `json:"validators"`
         } `json:"epoch_mark"`
         TicketsMark   *block.WinningTicketMarker `json:"tickets_mark"`
         OffendersMark []string                   `json:"offenders_mark"`
@@ -195,45 +210,18 @@ type expectedBlock struct {
             Attempt   uint8  `json:"attempt"`
             Signature string `json:"signature"`
         } `json:"tickets"`
-        Disputes struct {
-            Verdicts []struct {
-                Target string `json:"target"`
-                Age    uint32 `json:"age"`
-                Votes  []struct {
-                    Vote      bool   `json:"vote"`
-                    Index     uint16 `json:"index"`
-                    Signature string `json:"signature"`
-                } `json:"votes"`
-            } `json:"verdicts"`
-            Culprits []struct {
-                Target    string `json:"target"`
-                Key       string `json:"key"`
-                Signature string `json:"signature"`
-            } `json:"culprits"`
-            Faults []struct {
-                Target    string `json:"target"`
-                Vote      bool   `json:"vote"`
-                Key       string `json:"key"`
-                Signature string `json:"signature"`
-            } `json:"faults"`
-        } `json:"disputes"`
         Preimages []struct {
             Requester uint32 `json:"requester"`
             Blob      string `json:"blob"`
         } `json:"preimages"`
-        Assurances []struct {
-            Anchor         string `json:"anchor"`
-            Bitfield       string `json:"bitfield"`
-            ValidatorIndex uint16 `json:"validator_index"`
-            Signature      string `json:"signature"`
-        } `json:"assurances"`
         Guarantees []struct {
             Report struct {
                 PackageSpec struct {
-                    Hash        string `json:"hash"`
-                    Len         uint32 `json:"len"`
-                    ErasureRoot string `json:"erasure_root"`
-                    ExportsRoot string `json:"exports_root"`
+                    Hash         string `json:"hash"`
+                    Length       uint32 `json:"length"`
+                    ErasureRoot  string `json:"erasure_root"`
+                    ExportsRoot  string `json:"exports_root"`
+                    ExportsCount uint16 `json:"exports_count"`
                 } `json:"package_spec"`
                 Context struct {
                     Anchor           string           `json:"anchor"`
@@ -241,16 +229,17 @@ type expectedBlock struct {
                     BeefyRoot        string           `json:"beefy_root"`
                     LookupAnchor     string           `json:"lookup_anchor"`
                     LookupAnchorSlot jamtime.Timeslot `json:"lookup_anchor_slot"`
-                    Prerequisite     *crypto.Hash     `json:"Prerequisite"`
+                    Prerequisites    []crypto.Hash    `json:"prerequisites"`
                 } `json:"context"`
-                CoreIndex      uint16 `json:"core_index"`
-                AuthorizerHash string `json:"authorizer_hash"`
-                AuthOutput     string `json:"auth_output"`
-                Results        []struct {
-                    Service     block.ServiceId `json:"service"`
+                CoreIndex         uint16        `json:"core_index"`
+                AuthorizerHash    string        `json:"authorizer_hash"`
+                AuthOutput        string        `json:"auth_output"`
+                SegmentRootLookup []interface{} `json:"segment_root_lookup"`
+                Results           []struct {
+                    ServiceId   block.ServiceId `json:"service_id"`
                     CodeHash    string          `json:"code_hash"`
                     PayloadHash string          `json:"payload_hash"`
-                    GasRatio    uint64          `json:"gas_ratio"`
+                    Gas         uint64          `json:"gas"`
                     Result      Result          `json:"result"`
                 } `json:"results"`
             } `json:"report"`
@@ -260,5 +249,33 @@ type expectedBlock struct {
             Signature      string `json:"signature"`
         } `json:"signatures"`
     } `json:"guarantees"`
+    Assurances []struct {
+        Anchor         string `json:"anchor"`
+        Bitfield       string `json:"bitfield"`
+        ValidatorIndex uint16 `json:"validator_index"`
+        Signature      string `json:"signature"`
+    } `json:"assurances"`
+    Disputes struct {
+        Verdicts []struct {
+            Target string `json:"target"`
+            Age    uint32 `json:"age"`
+            Votes  []struct {
+                Vote      bool   `json:"vote"`
+                Index     uint16 `json:"index"`
+                Signature string `json:"signature"`
+            } `json:"votes"`
+        } `json:"verdicts"`
+        Culprits []struct {
+            Target    string `json:"target"`
+            Key       string `json:"key"`
+            Signature string `json:"signature"`
+        } `json:"culprits"`
+        Faults []struct {
+            Target    string `json:"target"`
+            Vote      bool   `json:"vote"`
+            Key       string `json:"key"`
+            Signature string `json:"signature"`
+        } `json:"faults"`
+    } `json:"disputes"`
 } `json:"extrinsic"`
 }
diff --git a/tests/integration/vectors/block.bin b/tests/integration/vectors/codec/block.bin
similarity index 83%
rename from tests/integration/vectors/block.bin
rename to tests/integration/vectors/codec/block.bin
index 367973b..bfdf3aa 100644
Binary files a/tests/integration/vectors/block.bin and b/tests/integration/vectors/codec/block.bin differ
diff --git a/tests/integration/vectors/expected_block.json b/tests/integration/vectors/codec/expected_block.json
similarity index 96%
rename from tests/integration/vectors/expected_block.json
rename to tests/integration/vectors/codec/expected_block.json
index 6b394e0..76229aa 100644
--- a/tests/integration/vectors/expected_block.json
+++ b/tests/integration/vectors/codec/expected_block.json
@@ -6,6 +6,7 @@
     "slot": 42,
     "epoch_mark": {
       "entropy": "0xae85d6635e9ae539d0846b911ec86a27fe000f619b78bcac8a74b77e36f6dbcf",
+      "tickets_entropy": "0x333a7e328f0c4183f4b947e1d8f68aa4034f762e5ecdb5a7f6fbf0afea2fd8cd",
       "validators": [
         "0x5e465beb01dbafe160ce8216047f2155dd0569f058afd52dcea601025a8d161d",
         "0x3d5e5a51aab2b048f8686ecd79712a80e3265a114cc73f14bdb2a59233fb66d0",
@@ -38,6 +39,90 @@
         "signature": "0x036479afe8e29f97741a1e3c2b59c217dc67d8a25f7ce12bf1febf6b4a5b2646fd81cdf0a75e20fc0de981f48471527728b0c4d151fbdd9b32e6c6b3c6cd71db9b51d1cca8eef4a128d63f542bdb129c6edca628e1276d8376347bd2071d949b7ec5d1bfc18ebc61753a5f24544d1ae75b6f6f35a5707fc99e787b86cb5a17138069f5bac8d16e08b20781b8841ff928a49ec7bf09f5f39159cb44370a5ce16b93b2d4325445bfd641f2ef750e1971690a4f11e815d418a4dca01f8d039f4543a59665dcea27a24dbb1fddca0a2e90b4e3292dfc7a5cee69c1e707cd491742f3d3015d9499b07bbf6ddc2342e48522c149089311cf6ccc9bf957b509e52df550f06e70f4f601c01c1326e0ce7e7ce2547fcc16509b8bdb1577b15adb042f1e393c01bc010ba3460119ab738e58e39297278315b0825b8b798835a7a0b133a0d890a24018f0797c3ba43d62dcda03320215c5734bc1b10e8bef5003aefb5513ce9cd1964db23b1a143c4da8142676e254f00f84cfc15fa1a3bab78536f5f9e2cbce7560be876142a8655b8c1fdf0ffab7e187ace3c37ed2c6dfc4590e658ad2efc8cdec6140452493e2693e682946d1c99a62cca529f46a6e9874ef72955e4d855881e35ef53fd95e307f9ebf94542860711bdd288ab13d8dec1fecd82fc9a2eb97e67ae38ec859034473972157b819b3215e359bf17952b95aaa2932f09e2a8c8e0fea82de9c6c2612766ed50702667520fbd5089c0cc300e9d32893799cf73520f5a925bdfb791a419e80e118d2ba69031566da30c06f5e83e74f4eae8a886018a4c1269a97217851bcb4bb945dbf1309d1d2e59d78474f5e52702536761eed0d9fd01e46261b61dca3fc85a0b4bdfa315edb0972fed80399909eef35ed10cda0e1a66680eedcf15b400d404dadf77cda5641412c20a465cea4411820ee491678821eef8dca745a2ffd8a787837d22b486342499ad84a51d4093655947185bfe72018c16b8fe622ab58c7d965e8dac3b1d5c9cedfb27f562d077ab85e1293496d69f6bafd8468855b50255788b430ed6e6ef8c0fde28651b005a91dbb0fd4d0635108d5ff9e4527fefa66817221b76f"
       }
     ],
+    "preimages": [
+      {
+        "requester": 16909060,
+        "blob": "0x81095e6122e3bc9d961e00014a7fc833"
+      },
+      {
+        "requester": 16909061,
+        "blob": "0xd257bc7d93a55be3561d720d40a6a342"
+      },
+      {
+        "requester": 16909062,
+        "blob": "0x38db056c7c3065fadb630ce6ccbc7385"
+      }
+    ],
+    "guarantees": [
+      {
+        "report": {
+          "package_spec": {
+            "hash": "0x30466e0ae1b05dde5249872475f6beeac368fd014b5a3413ceb32d3872143284",
+            "length": 42,
+            "erasure_root": "0x7af11fdaa717c398e223211842b41392f18df4bbc4ea0f4cfb972f19c7a64949",
+            "exports_root": "0xe8e52949f6e4fb1d943248df48c3f2b538200fddfea22388e5e061be8ebf21a7",
+            "exports_count": 69
+          },
+          "context": {
+            "anchor": "0xc0564c5e0de0942589df4343ad1956da66797240e2a2f2d6f8116b5047768986",
+            "state_root": "0xf6967658df626fa39cbfb6014b50196d23bc2cfbfa71a7591ca7715472dd2b48",
+            "beefy_root": "0x9329de635d4bbb8c47cdccbbc1285e48bf9dbad365af44b205343e99dea298f3",
+            "lookup_anchor": "0x60751ab5b251361fbfd3ad5b0e84f051ccece6b00830aed31a5354e00b20b9ed",
+            "lookup_anchor_slot": 33,
+            "prerequisites": []
+          },
+          "core_index": 3,
+          "authorizer_hash": "0x022e5e165cc8bd586404257f5cd6f5a31177b5c951eb076c7c10174f90006eef",
+          "auth_output": "0x0102030405",
+          "segment_root_lookup": [],
+          "results": [
+            {
+              "service_id": 16909060,
+              "code_hash": "0x70a50829851e8f6a8c80f92806ae0e95eb7c06ad064e311cc39107b3219e532e",
+              "payload_hash": "0xfa99b97e72fcfaef616108de981a59dc3310e2a9f5e73cd44d702ecaaccd8696",
+              "gas": 42,
+              "result": {
+                "ok": "0xaabbcc"
+              }
+            },
+            {
+              "service_id": 84281096,
+              "code_hash": "0xfcfc857dab216daf41f409c2012685846e4d34aedfeacaf84d9adfebda73fae6",
+              "payload_hash": "0xd55e07438aeeeb0d6509ab28af8a758d1fb70424db6b27c7e1ef6473e721c328",
+              "gas": 33,
+              "result": {
+                "panic": null
+              }
+            }
+          ]
+        },
+        "slot": 42,
+        "signatures": [
+          {
+            "validator_index": 0,
+            "signature": "0x5f6e74d204c2490e71be4451963d7d7da797d4fd37d6e0bda56927d02a3302ca3b3a0e08c961e7580e97a0f08c269f549728f52d9c7de3affe850a0371380012"
+          },
+          {
+            "validator_index": 1,
+            "signature": "0x3a6813f7691895a444d72cad60e3d54d64266fbaf567d7a5816a6623edcdbafed304517e4de88a399ed4d3faa2fc86e38243acbf480a652a236c27e515939032"
+          }
+        ]
+      }
+    ],
+    "assurances": [
+      {
+        "anchor": "0x0cffbf67aae50aeed3c6f8f0d9bf7d854ffd87cef8358cbbaa587a9e3bd1a776",
+        "bitfield": "0x01",
+        "validator_index": 0,
+        "signature": "0x2d8ec7b235be3b3cbe9be3d5ff36f082942102d64a0dc5953709a95cca55b58b1af297f534d464264be77477b547f3c596b947edbca33f6631f1aa188d25a38b"
+      },
+      {
+        "anchor": "0x2398ce69c3585e1b1b574a5a7185a2a086350abd4606d15aace8b4610b494772",
+        "bitfield": "0x01",
+        "validator_index": 1,
+        "signature": "0xdda7a577f150ee83afedc9d3b50a4f00fcf21248e6f73097abcc4bb634f854aedc53769838d294b09c0184fb0e66f09bae8cc243f842a6cc401488591e9ffdb1"
+      }
+    ],
     "disputes": {
       "verdicts": [
         {
@@ -123,88 +208,6 @@
           "signature": "0x557e5ee3660be9247c4908c74a46c91eebd713925dd7f2ede3ef4900ba277039f7d46ec15432116176cce8ce39d8ae21eabafdf71796eeb724ee7e4ff1dd1fd7"
         }
       ]
-    },
-    "preimages": [
-      {
-        "requester": 16909060,
-        "blob": "0x81095e6122e3bc9d961e00014a7fc833"
-      },
-      {
-        "requester": 16909061,
-        "blob": "0xd257bc7d93a55be3561d720d40a6a342"
-      },
-      {
-        "requester": 16909062,
-        "blob": "0x38db056c7c3065fadb630ce6ccbc7385"
-      }
-    ],
-    "assurances": [
-      {
-        "anchor": "0x0cffbf67aae50aeed3c6f8f0d9bf7d854ffd87cef8358cbbaa587a9e3bd1a776",
-        "bitfield": "0x01",
-        "validator_index": 0,
-        "signature": "0x2d8ec7b235be3b3cbe9be3d5ff36f082942102d64a0dc5953709a95cca55b58b1af297f534d464264be77477b547f3c596b947edbca33f6631f1aa188d25a38b"
-      },
-      {
-        "anchor": "0x2398ce69c3585e1b1b574a5a7185a2a086350abd4606d15aace8b4610b494772",
-        "bitfield": "0x01",
-        "validator_index": 1,
-        "signature": "0xdda7a577f150ee83afedc9d3b50a4f00fcf21248e6f73097abcc4bb634f854aedc53769838d294b09c0184fb0e66f09bae8cc243f842a6cc401488591e9ffdb1"
-      }
-    ],
-    "guarantees": [
-      {
-        "report": {
-          "package_spec": {
-            "hash": "0x30466e0ae1b05dde5249872475f6beeac368fd014b5a3413ceb32d3872143284",
-            "len": 42,
-            "erasure_root": "0x7af11fdaa717c398e223211842b41392f18df4bbc4ea0f4cfb972f19c7a64949",
-            "exports_root": "0xe8e52949f6e4fb1d943248df48c3f2b538200fddfea22388e5e061be8ebf21a7"
-          },
-          "context": {
-            "anchor": "0xc0564c5e0de0942589df4343ad1956da66797240e2a2f2d6f8116b5047768986",
-            "state_root": "0xf6967658df626fa39cbfb6014b50196d23bc2cfbfa71a7591ca7715472dd2b48",
-            "beefy_root": "0x9329de635d4bbb8c47cdccbbc1285e48bf9dbad365af44b205343e99dea298f3",
-            "lookup_anchor": "0x60751ab5b251361fbfd3ad5b0e84f051ccece6b00830aed31a5354e00b20b9ed",
-            "lookup_anchor_slot": 33,
-            "prerequisite": null
-          },
-          "core_index": 3,
-          "authorizer_hash": "0x022e5e165cc8bd586404257f5cd6f5a31177b5c951eb076c7c10174f90006eef",
-          "auth_output": "0x0102030405",
-          "results": [
-            {
-              "service": 16909060,
-              "code_hash": "0x70a50829851e8f6a8c80f92806ae0e95eb7c06ad064e311cc39107b3219e532e",
-              "payload_hash": "0xfa99b97e72fcfaef616108de981a59dc3310e2a9f5e73cd44d702ecaaccd8696",
-              "gas_ratio": 42,
-              "result": {
-                "ok": "0xaabbcc"
-              }
-            },
-            {
-              "service": 84281096,
-              "code_hash": "0xfcfc857dab216daf41f409c2012685846e4d34aedfeacaf84d9adfebda73fae6",
-              "payload_hash": "0xd55e07438aeeeb0d6509ab28af8a758d1fb70424db6b27c7e1ef6473e721c328",
-              "gas_ratio": 33,
-              "result": {
-                "panic": null
-              }
-            }
-          ]
-        },
-        "slot": 42,
-        "signatures": [
-          {
-            "validator_index": 0,
-            "signature": "0x5f6e74d204c2490e71be4451963d7d7da797d4fd37d6e0bda56927d02a3302ca3b3a0e08c961e7580e97a0f08c269f549728f52d9c7de3affe850a0371380012"
-          },
-          {
-            "validator_index": 1,
-            "signature": "0x3a6813f7691895a444d72cad60e3d54d64266fbaf567d7a5816a6623edcdbafed304517e4de88a399ed4d3faa2fc86e38243acbf480a652a236c27e515939032"
-          }
-        ]
-      }
-    ]
+    }
   }
 }