diff --git a/client/api/backend.go b/client/api/backend.go
deleted file mode 100644
index 97b1eccc07..0000000000
--- a/client/api/backend.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright 2023 ChainSafe Systems (ON)
-// SPDX-License-Identifier: LGPL-3.0-only
-
-package api
-
-type Key []byte
-
-type KeyValue struct {
-	Key   Key
-	Value []byte
-}
-
-// AuxStore is part of the substrate backend.
-// Provides access to an auxiliary database.
-//
-// This is a simple global database not aware of forks. Can be used for storing auxiliary
-// information like total block weight/difficulty for fork resolution purposes as a common use
-// case.
-type AuxStore interface {
-	// Insert auxiliary data into key-Value store.
-	//
-	// Deletions occur after insertions.
-	Insert(insert []KeyValue, delete []Key) error
-	// Get Query auxiliary data from key-Value store.
-	Get(key Key) (*[]byte, error)
-}
diff --git a/client/consensus/grandpa/authorities.go b/client/consensus/grandpa/authorities.go
index 2b5462685f..77ccf4d881 100644
--- a/client/consensus/grandpa/authorities.go
+++ b/client/consensus/grandpa/authorities.go
@@ -8,7 +8,6 @@ import (
 	"fmt"
 	"sync"
 
-	"github.com/ChainSafe/gossamer/pkg/scale"
 	"golang.org/x/exp/constraints"
 	"golang.org/x/exp/slices"
 )
@@ -705,64 +704,33 @@ func (asc *AuthoritySetChanges[N]) append(setID uint64, blockNumber N) {
 	})
 }
 
-type authoritySetChangeID scale.VaryingDataType
+type authoritySetChangeID any
 
-// Set will set a VaryingDataTypeValue using the underlying VaryingDataType
-func (asc *authoritySetChangeID) Set(val scale.VaryingDataTypeValue) (err error) {
-	vdt := scale.VaryingDataType(*asc)
-	err = vdt.Set(val)
-	if err != nil {
-		return
-	}
-	*asc = authoritySetChangeID(vdt)
-	return
+type authoritySetChangeIDs[N constraints.Unsigned] interface {
+	authoritySetChangeIDLatest | authoritySetChangeIDSet[N] | authoritySetChangeIDUnknown
 }
 
-// Value will return value from underying VaryingDataType
-func (asc *authoritySetChangeID) Value() (val scale.VaryingDataTypeValue, err error) {
-	vdt := scale.VaryingDataType(*asc)
-	return vdt.Value()
+func newAuthoritySetID[N constraints.Unsigned, ID authoritySetChangeIDs[N]](authSetChangeID ID) authoritySetChangeID {
+	return authoritySetChangeID(authSetChangeID)
 }
 
-func newAuthoritySetChangeID[N constraints.Unsigned]() authoritySetChangeID {
-	vdt := scale.MustNewVaryingDataType(latest{}, set[N]{}, unknown{})
-	return authoritySetChangeID(vdt)
-}
-
-type latest struct{}
-
-func (latest) Index() uint {
-	return 0
-}
+type authoritySetChangeIDLatest struct{}
 
-type set[N constraints.Unsigned] struct {
+type authoritySetChangeIDSet[N constraints.Unsigned] struct {
 	inner setIDNumber[N]
 }
 
-func (set[N]) Index() uint {
-	return 1
-}
-
-type unknown struct{}
-
-func (unknown) Index() uint {
-	return 2
-}
+type authoritySetChangeIDUnknown struct{}
 
 // Three states that can be returned: Latest, Set (tuple), Unknown
-func (asc *AuthoritySetChanges[N]) getSetID(blockNumber N) (authSetChangeID authoritySetChangeID, err error) {
+func (asc *AuthoritySetChanges[N]) getSetID(blockNumber N) (authoritySetChangeID, error) {
 	if asc == nil {
-		return authSetChangeID, fmt.Errorf("getSetID: authSetChanges is nil")
+		return nil, fmt.Errorf("getSetID: authSetChanges is nil")
 	}
-	authSetChangeID = newAuthoritySetChangeID[N]()
 	authSet := *asc
 	last := authSet[len(authSet)-1]
 	if last.BlockNumber < blockNumber {
-		err = authSetChangeID.Set(latest{})
-		if err != nil {
-			return authSetChangeID, err
-		}
-		return authSetChangeID, nil
+		return newAuthoritySetID[N](authoritySetChangeIDLatest{}), nil
 	}
 
 	idx, _ := slices.BinarySearchFunc(
@@ -786,26 +754,15 @@ func (asc *AuthoritySetChanges[N]) getSetID(blockNumber N) (authSetChangeID auth
 
 		// if this is the first index but not the first set id then we are missing data.
 		if idx == 0 && authChange.SetID != 0 {
-			err = authSetChangeID.Set(unknown{})
-			if err != nil {
-				return authSetChangeID, err
-			}
-			return authSetChangeID, nil
+			return newAuthoritySetID[N](authoritySetChangeIDUnknown{}), nil
 		}
-		err = authSetChangeID.Set(set[N]{
+
+		return newAuthoritySetID[N](authoritySetChangeIDSet[N]{
 			authChange,
-		})
-		if err != nil {
-			return authSetChangeID, err
-		}
-		return authSetChangeID, nil
+		}), nil
 	}
 
-	err = authSetChangeID.Set(unknown{})
-	if err != nil {
-		return authSetChangeID, err
-	}
-	return authSetChangeID, nil
+	return newAuthoritySetID[N](authoritySetChangeIDUnknown{}), nil
 }
 
 func (asc *AuthoritySetChanges[N]) insert(blockNumber N) {
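[Illustrative sketch, not part of this diff] With authoritySetChangeID now a plain `any` rather than a scale.VaryingDataType, callers consume getSetID results with an ordinary type switch, as proveFinality and the test assertions below do. The describeSetID helper name is hypothetical; it assumes the grandpa package types above plus the standard fmt package.

func describeSetID(changes AuthoritySetChanges[uint], block uint) (string, error) {
	id, err := changes.getSetID(block)
	if err != nil {
		return "", err
	}
	switch v := id.(type) {
	case authoritySetChangeIDLatest:
		// block is newer than the last recorded authority set change
		return "block is beyond the last recorded set change", nil
	case authoritySetChangeIDSet[uint]:
		// covered by a recorded set; v.inner holds the set ID and its last block
		return fmt.Sprintf("covered by set %d ending at block %d", v.inner.SetID, v.inner.BlockNumber), nil
	case authoritySetChangeIDUnknown:
		return "not covered by recorded changes; resync required", nil
	default:
		return "", fmt.Errorf("unsupported authoritySetChangeID type %T", v)
	}
}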
diff --git a/client/consensus/grandpa/authorities_test.go b/client/consensus/grandpa/authorities_test.go
index 6b786d4e87..db90e54d61 100644
--- a/client/consensus/grandpa/authorities_test.go
+++ b/client/consensus/grandpa/authorities_test.go
@@ -3,7 +3,6 @@
 package grandpa
 
 import (
-	"fmt"
 	"strings"
 	"testing"
 
@@ -1205,24 +1204,19 @@ func TestCleanUpStaleForcedChangesWhenApplyingStandardChangeAlternateCase(t *tes
 
 func assertExpectedSet(t *testing.T, authSetID authoritySetChangeID, expected setIDNumber[uint]) {
 	t.Helper()
-	authSetVal, err := authSetID.Value()
-	require.NoError(t, err)
-	switch val := authSetVal.(type) {
-	case set[uint]:
+	switch val := authSetID.(type) {
+	case authoritySetChangeIDSet[uint]:
 		require.Equal(t, expected, val.inner)
 	default:
-		err = fmt.Errorf("invalid authSetID type")
+		t.FailNow()
 	}
-	require.NoError(t, err)
 }
 
 func assertUnknown(t *testing.T, authSetID authoritySetChangeID) {
 	t.Helper()
-	authSetVal, err := authSetID.Value()
-	require.NoError(t, err)
 	isUnknown := false
-	switch authSetVal.(type) {
-	case unknown:
+	switch authSetID.(type) {
+	case authoritySetChangeIDUnknown:
 		isUnknown = true
 	}
 	require.True(t, isUnknown)
@@ -1230,11 +1224,9 @@ func assertUnknown(t *testing.T, authSetID authoritySetChangeID) {
 
 func assertLatest(t *testing.T, authSetID authoritySetChangeID) {
 	t.Helper()
-	authSetVal, err := authSetID.Value()
-	require.NoError(t, err)
 	isLatest := false
-	switch authSetVal.(type) {
-	case latest:
+	switch authSetID.(type) {
+	case authoritySetChangeIDLatest:
 		isLatest = true
 	}
 	require.True(t, isLatest)
diff --git a/client/consensus/grandpa/aux_schema.go b/client/consensus/grandpa/aux_schema.go
index eedde8a3d0..e070758922 100644
--- a/client/consensus/grandpa/aux_schema.go
+++ b/client/consensus/grandpa/aux_schema.go
@@ -7,7 +7,6 @@ import (
 	"errors"
 	"fmt"
 
-	"github.com/ChainSafe/gossamer/client/api"
 	grandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa"
 	"github.com/ChainSafe/gossamer/pkg/scale"
 	"golang.org/x/exp/constraints"
@@ -22,7 +21,7 @@ var (
 	errValueNotFound = errors.New("value not found")
 )
 
-type writeAux func(insertions []api.KeyValue) error
+type writeAux func(insertions []KeyValue) error
 
 type getGenesisAuthorities[ID AuthorityID] func() ([]Authority[ID], error)
 
@@ -31,7 +30,7 @@ type persistentData[H comparable, N constraints.Unsigned, ID AuthorityID, Sig Au
 	setState     SharedVoterSetState[H, N, ID, Sig]
 }
 
-func loadDecoded(store api.AuxStore, key []byte, destination any) error {
+func loadDecoded(store AuxStore, key []byte, destination any) error {
 	encodedValue, err := store.Get(key)
 	if err != nil {
 		return err
@@ -50,7 +49,7 @@ func loadDecoded(store api.AuxStore, key []byte, destination any) error {
 }
 
 func loadPersistent[H comparable, N constraints.Unsigned, ID AuthorityID, Sig AuthoritySignature](
-	store api.AuxStore,
+	store AuxStore,
 	genesisHash H,
 	genesisNumber N,
 	genesisAuths getGenesisAuthorities[ID]) (*persistentData[H, N, ID, Sig], error) {
@@ -85,13 +84,11 @@ func loadPersistent[H comparable, N constraints.Unsigned, ID AuthorityID, Sig Au
 			}
 		}
 
-		newSharedVoterSetState := sharedVoterSetState[H, N, ID, Sig]{
-			Inner: setState,
-		}
-
 		return &persistentData[H, N, ID, Sig]{
 			authoritySet: SharedAuthoritySet[H, N, ID]{inner: *authSet},
-			setState:     SharedVoterSetState[H, N, ID, Sig]{Inner: newSharedVoterSetState}, //nolint
+			setState: SharedVoterSetState[H, N, ID, Sig]{Inner: sharedVoterSetState[H, N, ID, Sig]{
+				Inner: setState,
+			}},
 		}, nil
 	}
 
@@ -116,9 +113,9 @@ func loadPersistent[H comparable, N constraints.Unsigned, ID AuthorityID, Sig Au
 		return nil, err
 	}
 
-	insert := []api.KeyValue{
-		{authoritySetKey, scale.MustMarshal(*genesisSet)}, //nolint
-		{setStateKey, scale.MustMarshal(genesisState)},    //nolint
+	insert := []KeyValue{
+		{authoritySetKey, scale.MustMarshal(*genesisSet)},
+		{setStateKey, scale.MustMarshal(genesisState)},
 	}
 
 	err = store.Insert(insert, nil)
@@ -126,13 +123,11 @@ func loadPersistent[H comparable, N constraints.Unsigned, ID AuthorityID, Sig Au
 		return nil, err
 	}
 
-	newSharedVoterSetState := sharedVoterSetState[H, N, ID, Sig]{
-		Inner: genesisState,
-	}
-
 	return &persistentData[H, N, ID, Sig]{
 		authoritySet: SharedAuthoritySet[H, N, ID]{inner: *genesisSet},
-		setState:     SharedVoterSetState[H, N, ID, Sig]{Inner: newSharedVoterSetState}, //nolint
+		setState: SharedVoterSetState[H, N, ID, Sig]{Inner: sharedVoterSetState[H, N, ID, Sig]{
+			Inner: genesisState,
+		}},
 	}, nil
 }
 
@@ -145,7 +140,6 @@ func UpdateAuthoritySet[H comparable, N constraints.Unsigned, ID AuthorityID, Si
 	set AuthoritySet[H, N, ID],
 	newSet *NewAuthoritySetStruct[H, N, ID],
 	write writeAux) error {
-	// TODO make sure that Insert has affect of both insert and update depending on use case
 	encodedAuthSet, err := scale.Marshal(set)
 	if err != nil {
 		return err
@@ -169,9 +163,9 @@ func UpdateAuthoritySet[H comparable, N constraints.Unsigned, ID AuthorityID, Si
 			return err
 		}
 
-		insert := []api.KeyValue{
-			{authoritySetKey, encodedAuthSet}, //nolint
-			{setStateKey, encodedVoterSet},    //nolint
+		insert := []KeyValue{
+			{authoritySetKey, encodedAuthSet},
+			{setStateKey, encodedVoterSet},
 		}
 		err = write(insert)
 		if err != nil {
@@ -179,8 +173,8 @@ func UpdateAuthoritySet[H comparable, N constraints.Unsigned, ID AuthorityID, Si
 		}
 
 	} else {
-		insert := []api.KeyValue{
-			{authoritySetKey, encodedAuthSet}, //nolint
+		insert := []KeyValue{
+			{authoritySetKey, encodedAuthSet},
 		}
 
 		err = write(insert)
@@ -201,16 +195,16 @@ func updateBestJustification[
 	N constraints.Unsigned,
 	S comparable,
 	ID AuthorityID,
-	H Header[Hash, N]](
-	justification Justification[Hash, N, S, ID, H],
+](
+	justification GrandpaJustification[Hash, N, S, ID],
 	write writeAux) error {
 	encodedJustificaiton, err := scale.Marshal(justification)
 	if err != nil {
 		return fmt.Errorf("marshalling: %w", err)
 	}
 
-	insert := []api.KeyValue{
-		{bestJustification, encodedJustificaiton}, //nolint
+	insert := []KeyValue{
+		{bestJustification, encodedJustificaiton},
 	}
 	err = write(insert)
 	if err != nil {
@@ -225,15 +219,15 @@ func BestJustification[
 	N constraints.Unsigned,
 	S comparable,
 	ID AuthorityID,
-	H Header[Hash, N]](
-	store api.AuxStore) (*Justification[Hash, N, S, ID, H], error) {
-	justification := Justification[Hash, N, S, ID, H]{}
+	H Header[Hash, N],
+](store AuxStore) (*GrandpaJustification[Hash, N, S, ID], error) {
+	justification := decodeGrandpaJustification[Hash, N, S, ID, H]{}
 	err := loadDecoded(store, bestJustification, &justification)
 	if err != nil {
 		return nil, err
 	}
 
-	return &justification, nil
+	return justification.GrandpaJustification(), nil
 }
 
 // WriteVoterSetState Write voter set state.
@@ -244,8 +238,8 @@ func WriteVoterSetState[H comparable, N constraints.Unsigned, ID AuthorityID, Si
 	if err != nil {
 		return err
 	}
-	insert := []api.KeyValue{
-		{setStateKey, encodedVoterSet}, //nolint
+	insert := []KeyValue{
+		{setStateKey, encodedVoterSet},
 	}
 	err = write(insert)
 	if err != nil {
@@ -271,8 +265,8 @@ func WriteConcludedRound[H comparable, N constraints.Unsigned, ID AuthorityID, S
 		return err
 	}
 
-	insert := []api.KeyValue{
-		{key, encRoundData}, //nolint
+	insert := []KeyValue{
+		{key, encRoundData},
 	}
 	err = write(insert)
 	if err != nil {
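[Usage sketch, not part of this diff] It mirrors the aux_schema tests below: updateBestJustification persists through a writeAux closure over any AuxStore, and BestJustification reads it back, now returning the header-erased GrandpaJustification. The roundTripBestJustification helper is hypothetical; dummyAuthID and testHeader are the test doubles defined in this package's tests.

func roundTripBestJustification(
	store AuxStore,
	just GrandpaJustification[string, uint, string, dummyAuthID],
) (*GrandpaJustification[string, uint, string, dummyAuthID], error) {
	// writeAux is just a closure over the store's Insert
	write := func(insertions []KeyValue) error { return store.Insert(insertions, nil) }

	if err := updateBestJustification[string, uint, string, dummyAuthID](just, write); err != nil {
		return nil, err
	}
	// decoding needs a concrete header type, supplied as the final type parameter
	return BestJustification[string, uint, string, dummyAuthID, testHeader[string, uint]](store)
}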
diff --git a/client/consensus/grandpa/aux_schema_test.go b/client/consensus/grandpa/aux_schema_test.go
index 70a3434139..741552fda9 100644
--- a/client/consensus/grandpa/aux_schema_test.go
+++ b/client/consensus/grandpa/aux_schema_test.go
@@ -6,7 +6,6 @@ package grandpa
 import (
 	"testing"
 
-	"github.com/ChainSafe/gossamer/client/api"
 	finalityGrandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa"
 	"github.com/ChainSafe/gossamer/pkg/scale"
 	"github.com/stretchr/testify/require"
@@ -17,15 +16,15 @@ func genesisAuthorities[ID AuthorityID](auths []Authority[ID], err error) getGen
 	return func() ([]Authority[ID], error) { return auths, err }
 }
 
-func write(store api.AuxStore) writeAux {
-	return func(insertions []api.KeyValue) error {
+func write(store AuxStore) writeAux {
+	return func(insertions []KeyValue) error {
 		return store.Insert(insertions, nil)
 	}
 }
 
-type dummyStore []api.KeyValue
+type dummyStore []KeyValue
 
-func (client *dummyStore) Insert(insert []api.KeyValue, deleted []api.Key) error {
+func (client *dummyStore) Insert(insert []KeyValue, deleted []Key) error {
 	for _, val := range insert {
 		*client = append(*client, val)
 	}
@@ -48,7 +47,7 @@ func (client *dummyStore) Insert(insert []api.KeyValue, deleted []api.Key) error
 
 }
 
-func (client *dummyStore) Get(key api.Key) (*[]byte, error) {
+func (client *dummyStore) Get(key Key) (*[]byte, error) {
 	for _, value := range *client {
 		if slices.Equal(value.Key, key) {
 			return &value.Value, nil
@@ -64,15 +63,15 @@ func newDummyStore(t *testing.T) *dummyStore {
 
 func TestDummyStore(t *testing.T) {
 	store := newDummyStore(t)
-	insert := []api.KeyValue{
-		{authoritySetKey, scale.MustMarshal([]byte{1})}, //nolint
-		{setStateKey, scale.MustMarshal([]byte{2})},     //nolint
+	insert := []KeyValue{
+		{authoritySetKey, scale.MustMarshal([]byte{1})},
+		{setStateKey, scale.MustMarshal([]byte{2})},
 	}
 	err := store.Insert(insert, nil)
 	require.NoError(t, err)
 	require.True(t, len(*store) == 2)
 
-	del := []api.Key{setStateKey}
+	del := []Key{setStateKey}
 	err = store.Insert(nil, del)
 	require.NoError(t, err)
 	require.True(t, len(*store) == 1)
@@ -152,9 +151,9 @@ func TestLoadPersistentNotGenesis(t *testing.T) {
 	genesisState, err := NewLiveVoterSetState[string, uint, dummyAuthID, uint](0, *genesisSet, *base)
 	require.NoError(t, err)
 
-	insert := []api.KeyValue{
-		{authoritySetKey, scale.MustMarshal(*genesisSet)}, //nolint
-		{setStateKey, scale.MustMarshal(genesisState)},    //nolint
+	insert := []KeyValue{
+		{authoritySetKey, scale.MustMarshal(*genesisSet)},
+		{setStateKey, scale.MustMarshal(genesisState)},
 	}
 
 	err = store.Insert(insert, nil)
@@ -176,8 +175,8 @@ func TestLoadPersistentNotGenesis(t *testing.T) {
 
 	// Auth set written but not set state
 	store = newDummyStore(t)
-	insert = []api.KeyValue{
-		{authoritySetKey, scale.MustMarshal(*genesisSet)}, //nolint
+	insert = []KeyValue{
+		{authoritySetKey, scale.MustMarshal(*genesisSet)},
 	}
 
 	err = store.Insert(insert, nil)
@@ -380,13 +379,13 @@ func TestWriteJustification(t *testing.T) {
 	precommit := makePrecommit(t, "a", 1, 1)
 	precommits = append(precommits, precommit)
 
-	expAncestries := make([]testHeader[string, uint], 0)
+	expAncestries := make([]Header[string, uint], 0)
 	expAncestries = append(expAncestries, testHeader[string, uint]{
 		NumberField:     100,
 		ParentHashField: "a",
 	})
 
-	justification := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{
+	justification := GrandpaJustification[string, uint, string, dummyAuthID]{
 		Round: 2,
 		Commit: finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
 			TargetHash:   "a",
@@ -399,7 +398,7 @@ func TestWriteJustification(t *testing.T) {
 	_, err := BestJustification[string, uint, string, dummyAuthID, testHeader[string, uint]](store)
 	require.ErrorIs(t, err, errValueNotFound)
 
-	err = updateBestJustification[string, uint, string, dummyAuthID, testHeader[string, uint]](justification, write(store))
+	err = updateBestJustification[string, uint, string, dummyAuthID](justification, write(store))
 	require.NoError(t, err)
 
 	bestJust, err := BestJustification[string, uint, string, dummyAuthID, testHeader[string, uint]](store)
diff --git a/client/consensus/grandpa/finality_proof.go b/client/consensus/grandpa/finality_proof.go
new file mode 100644
index 0000000000..9d1262c8a3
--- /dev/null
+++ b/client/consensus/grandpa/finality_proof.go
@@ -0,0 +1,246 @@
+// Copyright 2023 ChainSafe Systems (ON)
+// SPDX-License-Identifier: LGPL-3.0-only
+
+package grandpa
+
+import (
+	"errors"
+
+	"github.com/ChainSafe/gossamer/pkg/scale"
+	"golang.org/x/exp/constraints"
+)
+
+// GRANDPA block finality proof generation and check.
+//
+// Finality of block B is proved by providing:
+// 1) the justification for the descendant block F;
+// 2) headers sub-chain (B; F] if B != F;
+// 3) proof of GRANDPA::authorities() if the set changes at block F.
+//
+// Since the earliest possible justification is returned, the GRANDPA authorities set
+// at block F is guaranteed to be the same as at block B (this is because a block
+// that enacts a new GRANDPA authorities set always comes with a justification). It also
+// means that the `set_id` is the same at blocks B and F.
+//
+// Let U be the last finalized block known to the caller. If the authorities set has changed
+// several times in the (U; F] interval, multiple finality proof fragments are returned (one
+// for each authority set change) and they must be verified in order.
+//
+// The finality proof provider can choose how to provide the finality proof. An incomplete
+// finality proof (one that finalises some block C that is an ancestor of B and a descendant
+// of U) may be returned.
+
+var (
+	// The requested block has not yet been finalized
+	errBlockNotYetFinalized = errors.New("block not yet finalized")
+	// The requested block is not covered by authority set changes. Likely this means the block is
+	// in the latest authority set, and the subscription API is more appropriate
+	errBlockNotInAuthoritySetChanges = errors.New("block not covered by authority set changes")
+)
+
+const maxUnknownHeaders = 100_000
+
+// FinalityProofProvider Finality proof provider for serving network requests.
+type FinalityProofProvider[
+	BE Backend[Hash, N, H, B],
+	Hash constraints.Ordered,
+	N constraints.Unsigned,
+	S comparable,
+	ID AuthorityID,
+	H Header[Hash, N],
+	B BlockchainBackend[Hash, N, H],
+] struct {
+	backend            BE
+	sharedAuthoritySet *SharedAuthoritySet[Hash, N, ID]
+}
+
+// NewFinalityProofProvider Create a new finality proof provider using:
+//
+// - backend for accessing blockchain data;
+//
+// - sharedAuthoritySet for accessing authority set data.
+func NewFinalityProofProvider[
+	BE Backend[Hash, N, H, B],
+	Hash constraints.Ordered,
+	N constraints.Unsigned,
+	S comparable,
+	ID AuthorityID,
+	H Header[Hash, N],
+	B BlockchainBackend[Hash, N, H],
+](
+	backend BE,
+	sharedAuthSet *SharedAuthoritySet[Hash, N, ID]) *FinalityProofProvider[BE, Hash, N, S, ID, H, B] {
+	return &FinalityProofProvider[BE, Hash, N, S, ID, H, B]{
+		backend:            backend,
+		sharedAuthoritySet: sharedAuthSet,
+	}
+}
+
+// ProveFinality Prove finality for the given block number by returning the SCALE-encoded
+// finality proof, which contains a justification for the last block of the authority set.
+func (provider FinalityProofProvider[BE, H, N, S, ID, Header, B]) ProveFinality(block N) (*[]byte, error) {
+	proof, err := provider.proveFinalityProof(block, true)
+	if err != nil {
+		return nil, err
+	}
+
+	if proof != nil {
+		encodedProof, err := scale.Marshal(*proof)
+		if err != nil {
+			return nil, err
+		}
+		return &encodedProof, nil
+	}
+
+	return nil, nil
+}
+
+// Prove finality for the given block number by returning a Justification for the last block of
+// the authority set.
+//
+// If `collectUnknownHeaders` is true, the finality proof will include all headers from the
+// requested block until the block the justification refers to.
+func (provider FinalityProofProvider[BE, Hash, N, S, ID, H, B]) proveFinalityProof(
+	block N,
+	collectUnknownHeaders bool) (*FinalityProof[Hash, N, H], error) {
+	if provider.sharedAuthoritySet == nil {
+		return nil, nil
+	}
+
+	return proveFinality[BE, Hash, N, S, ID, H, B](
+		provider.backend,
+		provider.sharedAuthoritySet.inner.AuthoritySetChanges,
+		block,
+		collectUnknownHeaders,
+	)
+}
+
+// FinalityProof Finality for block B is proved by providing:
+// 1) the justification for the descendant block F;
+// 2) headers sub-chain (B; F] if B != F;
+type FinalityProof[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	// The hash of block F for which justification is provided
+	Block Hash
+	// Justification of the block F
+	Justification []byte
+	// The set of headers in the range (B; F] that we believe are unknown to the caller. Ordered.
+	UnknownHeaders []H
+}
+
+// Prove finality for the given block number by returning a justification for the last block of
+// the authority set that the given block is part of, or a justification for the latest
+// finalized block if the given block is part of the current authority set.
+//
+// If `collectUnknownHeaders` is true, the finality proof will include all headers from the
+// requested block until the block the justification refers to.
+func proveFinality[
+	BE Backend[Hash, N, H, B],
+	Hash constraints.Ordered,
+	N constraints.Unsigned,
+	S comparable,
+	ID AuthorityID,
+	H Header[Hash, N],
+	B BlockchainBackend[Hash, N, H],
+](
+	backend BE,
+	authSetChanges AuthoritySetChanges[N],
+	block N,
+	collectUnknownHeaders bool,
+) (*FinalityProof[Hash, N, H], error) {
+	// Early-return if we are sure that there are no blocks finalized that cover the requested
+	// block.
+	finalizedNumber := backend.Blockchain().Info().FinalizedNumber
+	if finalizedNumber < block {
+		logger.Tracef("requested finality proof for descendant of %v while we only have finalized %v", block, finalizedNumber)
+		return nil, errBlockNotYetFinalized
+	}
+
+	authSetChangeID, err := authSetChanges.getSetID(block)
+	if err != nil {
+		return nil, err
+	}
+
+	var encJustification []byte
+	var justBlock N
+
+	switch val := authSetChangeID.(type) {
+	case authoritySetChangeIDLatest:
+		justification, err := BestJustification[Hash, N, S, ID, H](backend)
+		if err != nil && !errors.Is(err, errValueNotFound) {
+			return nil, err
+		}
+
+		if justification != nil {
+			encJustification, err = scale.Marshal(*justification)
+			if err != nil {
+				return nil, err
+			}
+			justBlock = justification.Target().number
+		} else {
+			logger.Trace("No justification found for the latest finalized block. Returning empty proof")
+			return nil, nil
+		}
+	case authoritySetChangeIDSet[N]:
+		lastBlockForSetID, err := backend.Blockchain().ExpectBlockHashFromID(val.inner.BlockNumber)
+		if err != nil {
+			return nil, err
+		}
+
+		// If error or no justifications found, return empty proof
+		justifications, err := backend.Blockchain().Justifications(lastBlockForSetID)
+		if err != nil || justifications == nil {
+			logger.Tracef("getting justifications when making finality proof for %v. Returning empty proof",
+				block)
+			return nil, nil //nolint
+		}
+		justification := justifications.IntoJustification(GrandpaEngineID)
+		if justification != nil {
+			encJustification = *justification
+			justBlock = val.inner.BlockNumber
+		} else {
+			logger.Tracef("No justification found when making finality proof for %v. Returning empty proof",
+				block)
+			return nil, nil
+		}
+	case authoritySetChangeIDUnknown:
+		logger.Tracef("authoritySetChanges does not cover the requested block %v due to missing data."+
+			" You need to resync to populate AuthoritySetChanges properly", block)
+
+		return nil, errBlockNotInAuthoritySetChanges
+	default:
+		panic("unsupported authoritySetChangeID type for authSetChangeID")
+	}
+
+	var headers []H
+	if collectUnknownHeaders {
+		// Collect all headers from the requested block until the last block of the set
+		current := block + 1
+		for {
+			if current > justBlock || len(headers) >= maxUnknownHeaders {
+				break
+			}
+			hash, err := backend.Blockchain().ExpectBlockHashFromID(current)
+			if err != nil {
+				return nil, err
+			}
+
+			header, err := backend.Blockchain().ExpectHeader(hash)
+			if err != nil {
+				return nil, err
+			}
+			headers = append(headers, header)
+			current += 1
+		}
+	}
+
+	blockHash, err := backend.Blockchain().ExpectBlockHashFromID(justBlock)
+	if err != nil {
+		return nil, err
+	}
+
+	return &FinalityProof[Hash, N, H]{
+		Block:          blockHash,
+		Justification:  encJustification,
+		UnknownHeaders: headers,
+	}, nil
+}
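[Hedged decoding sketch, not part of this diff] ProveFinality returns the SCALE-encoded FinalityProof, so a caller unmarshals it with a concrete header type before verifying the embedded justification, much as checkFinalityProof does in the tests below. The decodeFinalityProof name is hypothetical; testHeader is the package's test double and fmt/scale are assumed imports.

func decodeFinalityProof(encodedProof []byte) (*FinalityProof[string, uint, testHeader[string, uint]], error) {
	proof := FinalityProof[string, uint, testHeader[string, uint]]{}
	if err := scale.Unmarshal(encodedProof, &proof); err != nil {
		return nil, fmt.Errorf("decoding finality proof: %w", err)
	}
	return &proof, nil
}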
diff --git a/client/consensus/grandpa/finality_proof_test.go b/client/consensus/grandpa/finality_proof_test.go
new file mode 100644
index 0000000000..3204d234b0
--- /dev/null
+++ b/client/consensus/grandpa/finality_proof_test.go
@@ -0,0 +1,501 @@
+// Copyright 2023 ChainSafe Systems (ON)
+// SPDX-License-Identifier: LGPL-3.0-only
+
+package grandpa
+
+import (
+	"fmt"
+	"testing"
+
+	finalityGrandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa"
+	"github.com/ChainSafe/gossamer/pkg/scale"
+	"github.com/stretchr/testify/require"
+	"golang.org/x/exp/constraints"
+)
+
+// Check GRANDPA proof-of-finality for the given block.
+//
+// Returns the vector of headers that MUST be validated + imported
+// AND if at least one of those headers is invalid, all other MUST be considered invalid.
+func checkFinalityProof[
+	Hash constraints.Ordered,
+	N constraints.Unsigned,
+	S comparable,
+	H Header[Hash, N],
+	ID AuthorityID,
+](
+	currentSetID uint64,
+	currentAuthorities AuthorityList[ID],
+	remoteProof []byte,
+) (FinalityProof[Hash, N, H], error) {
+	proof := FinalityProof[Hash, N, H]{}
+	err := scale.Unmarshal(remoteProof, &proof)
+	if err != nil {
+		return FinalityProof[Hash, N, H]{}, fmt.Errorf("failed to decode finality proof %s", err)
+	}
+
+	justification := GrandpaJustification[Hash, N, S, ID]{}
+	err = scale.Unmarshal(proof.Justification, &justification)
+	if err != nil {
+		return FinalityProof[Hash, N, H]{}, fmt.Errorf("error decoding justification for header %s", err)
+	}
+
+	err = justification.Verify(currentSetID, currentAuthorities)
+	if err != nil {
+		return FinalityProof[Hash, N, H]{}, err
+	}
+
+	return proof, nil
+}
+
+func createCommit(
+	t *testing.T,
+	targetHash string,
+	targetNum uint,
+	round uint64,
+	ID dummyAuthID,
+) finalityGrandpa.Commit[string, uint, string, dummyAuthID] {
+	t.Helper()
+	precommit := finalityGrandpa.Precommit[string, uint]{
+		TargetHash:   targetHash,
+		TargetNumber: targetNum,
+	}
+
+	message := finalityGrandpa.Message[string, uint]{
+		Value: precommit,
+	}
+
+	msg := messageData[string, uint]{
+		round,
+		1,
+		message,
+	}
+
+	encMsg, err := scale.Marshal(msg)
+	require.NoError(t, err)
+
+	signedPrecommit := finalityGrandpa.SignedPrecommit[string, uint, string, dummyAuthID]{
+		Precommit: precommit,
+		ID:        ID,
+		Signature: string(encMsg),
+	}
+
+	commit := finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
+		TargetHash:   targetHash,
+		TargetNumber: targetNum,
+		Precommits:   []finalityGrandpa.SignedPrecommit[string, uint, string, dummyAuthID]{signedPrecommit},
+	}
+
+	return commit
+}
+
+func TestFinalityProof_FailsIfNoMoreLastFinalizedBlocks(t *testing.T) {
+	dummyInfo := Info[uint]{
+		FinalizedNumber: 4,
+	}
+	mockBlockchain := NewBlockchainBackendMock[string, uint, testHeader[string, uint]](t)
+	mockBlockchain.EXPECT().Info().Return(dummyInfo).Once()
+
+	mockBackend := NewBackendMock[
+		string,
+		uint,
+		testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]]](t)
+	mockBackend.EXPECT().Blockchain().Return(mockBlockchain).Once()
+
+	// The last finalized block is 4, so we cannot provide further justifications.
+	authoritySetChanges := AuthoritySetChanges[uint]{}
+	_, err := proveFinality[
+		*BackendMock[string, uint, testHeader[string, uint],
+			*BlockchainBackendMock[string, uint, testHeader[string, uint]]],
+		string,
+		uint,
+		string,
+		dummyAuthID,
+		testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]],
+	](
+		mockBackend,
+		authoritySetChanges,
+		5,
+		true)
+	require.ErrorIs(t, err, errBlockNotYetFinalized)
+}
+
+func TestFinalityProof_IsNoneIfNoJustificationKnown(t *testing.T) {
+	dummyInfo := Info[uint]{
+		FinalizedNumber: 4,
+	}
+	dummyHash := "dummyHash"
+	mockBlockchain := NewBlockchainBackendMock[string, uint, testHeader[string, uint]](t)
+	mockBlockchain.EXPECT().Info().Return(dummyInfo).Once()
+	mockBlockchain.EXPECT().ExpectBlockHashFromID(uint(4)).Return(dummyHash, nil).Once()
+	mockBlockchain.EXPECT().Justifications(dummyHash).Return(nil, nil).Once()
+
+	mockBackend := NewBackendMock[string, uint, testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]]](t)
+	mockBackend.EXPECT().Blockchain().Return(mockBlockchain).Times(3)
+
+	authoritySetChanges := AuthoritySetChanges[uint]{}
+	authoritySetChanges.append(0, 4)
+
+	// Block 4 is finalized without justification
+	// => we can't prove finality of 3
+	proofOf3, err := proveFinality[
+		*BackendMock[string, uint, testHeader[string, uint],
+			*BlockchainBackendMock[string, uint, testHeader[string, uint]]],
+		string,
+		uint,
+		string,
+		dummyAuthID,
+		testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]],
+	](
+		mockBackend,
+		authoritySetChanges,
+		3,
+		true,
+	)
+	require.NoError(t, err)
+	require.Nil(t, proofOf3)
+}
+
+func TestFinalityProof_CheckFailsWhenProofDecodeFails(t *testing.T) {
+	// When we can't decode proof from Vec<u8>
+	authorityList := AuthorityList[dummyAuthID]{}
+	_, err := checkFinalityProof[string, uint, string, testHeader[string, uint], dummyAuthID](
+		1,
+		authorityList,
+		[]byte{42},
+	)
+	require.NotNil(t, err)
+	require.ErrorContains(t, err, "failed to decode finality proof")
+}
+
+func TestFinalityProof_CheckFailsWhenProofIsEmpty(t *testing.T) {
+	// When decoded proof has zero length
+	authorityList := AuthorityList[dummyAuthID]{}
+	grandpaJustification := GrandpaJustification[string,
+		uint,
+		string,
+		dummyAuthID,
+	]{}
+	encJustification, err := scale.Marshal(grandpaJustification)
+	require.NoError(t, err)
+	_, err = checkFinalityProof[string, uint, string, testHeader[string, uint], dummyAuthID](
+		1,
+		authorityList,
+		encJustification,
+	)
+	require.NotNil(t, err)
+}
+
+func TestFinalityProof_CheckFailsWithIncompleteJustification(t *testing.T) {
+	authorityList := AuthorityList[dummyAuthID]{
+		Authority[dummyAuthID]{
+			Key:    dummyAuthID(1),
+			Weight: uint64(1),
+		},
+	}
+
+	// Create a commit without precommits
+	commit := finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
+		TargetHash:   "hash7",
+		TargetNumber: uint(7),
+	}
+
+	grandpaJust := GrandpaJustification[string, uint, string, dummyAuthID]{
+		Round:  8,
+		Commit: commit,
+	}
+
+	finalityProof := FinalityProof[string, uint, testHeader[string, uint]]{
+		Block:         "hash2",
+		Justification: scale.MustMarshal(grandpaJust),
+	}
+
+	_, err := checkFinalityProof[string, uint, string, testHeader[string, uint], dummyAuthID](
+		1,
+		authorityList,
+		scale.MustMarshal(finalityProof),
+	)
+	require.ErrorIs(t, err, errBadJustification)
+}
+
+func TestFinalityProof_CheckWorksWithCorrectJustification(t *testing.T) {
+	ID := dummyAuthID(1)
+	targetHash := "target"
+	targetNum := uint(21)
+	authorityList := AuthorityList[dummyAuthID]{
+		Authority[dummyAuthID]{
+			Key:    ID,
+			Weight: uint64(1),
+		},
+	}
+
+	commit := createCommit(t, targetHash, targetNum, 1, ID)
+	grandpaJust := GrandpaJustification[string, uint, string, dummyAuthID]{
+		Round:  8,
+		Commit: commit,
+	}
+
+	finalityProof := FinalityProof[string, uint, testHeader[string, uint]]{
+		Block:         "hash2",
+		Justification: scale.MustMarshal(grandpaJust),
+	}
+
+	newFinalityProof, err := checkFinalityProof[string, uint, string, testHeader[string, uint], dummyAuthID](
+		1,
+		authorityList,
+		scale.MustMarshal(finalityProof),
+	)
+	require.NoError(t, err)
+	require.Equal(t, finalityProof, newFinalityProof)
+}
+
+func TestFinalityProof_UsingAuthoritySetChangesFailsWithUndefinedStart(t *testing.T) {
+	dummyInfo := Info[uint]{
+		FinalizedNumber: 8,
+	}
+	mockBlockchain := NewBlockchainBackendMock[string, uint, testHeader[string, uint]](t)
+	mockBlockchain.EXPECT().Info().Return(dummyInfo).Once()
+
+	mockBackend := NewBackendMock[string, uint, testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]]](t)
+	mockBackend.EXPECT().Blockchain().Return(mockBlockchain).Once()
+
+	// We are missing the block for the preceding set, so the start is not well-defined.
+	authoritySetChanges := AuthoritySetChanges[uint]{}
+	authoritySetChanges.append(1, 8)
+
+	_, err := proveFinality[
+		*BackendMock[string, uint, testHeader[string, uint],
+			*BlockchainBackendMock[string, uint, testHeader[string, uint]]],
+		string,
+		uint,
+		string,
+		dummyAuthID,
+		testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]],
+	](
+		mockBackend,
+		authoritySetChanges,
+		6,
+		true,
+	)
+	require.ErrorIs(t, err, errBlockNotInAuthoritySetChanges)
+}
+
+func TestFinalityProof_UsingAuthoritySetChangesWorks(t *testing.T) {
+	ID := dummyAuthID(1)
+	header7 := testHeader[string, uint]{
+		NumberField: uint(7),
+		HashField:   "hash7",
+	}
+	header8 := testHeader[string, uint]{
+		NumberField:     uint(8),
+		HashField:       "hash8",
+		ParentHashField: "hash7",
+	}
+
+	dummyInfo := Info[uint]{
+		FinalizedNumber: 8,
+	}
+
+	round := uint64(8)
+	commit := createCommit(t, "hash8", uint(8), round, ID)
+	grandpaJust := GrandpaJustification[string, uint, string, dummyAuthID]{
+		Round:  round,
+		Commit: commit,
+	}
+
+	encJust, err := scale.Marshal(grandpaJust)
+	require.NoError(t, err)
+
+	justifications := Justifications{Justification{
+		EngineID:             GrandpaEngineID,
+		EncodedJustification: encJust,
+	}}
+
+	mockBlockchain := NewBlockchainBackendMock[string, uint, testHeader[string, uint]](t)
+	mockBlockchain.EXPECT().Info().Return(dummyInfo).Once()
+	mockBlockchain.EXPECT().ExpectBlockHashFromID(uint(7)).Return("hash7", nil).Once()
+	mockBlockchain.EXPECT().ExpectHeader("hash7").Return(header7, nil).Once()
+	mockBlockchain.EXPECT().ExpectBlockHashFromID(uint(8)).Return("hash8", nil).Times(3)
+	mockBlockchain.EXPECT().Justifications("hash8").Return(&justifications, nil).Times(1)
+	mockBlockchain.EXPECT().ExpectHeader("hash8").Return(header8, nil).Once()
+
+	mockBackend := NewBackendMock[string, uint, testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]]](t)
+	mockBackend.EXPECT().Blockchain().Return(mockBlockchain).Times(8)
+
+	// Authority set change at block 8, so the justification stored there will be used in the
+	// FinalityProof for block 6
+	authoritySetChanges := AuthoritySetChanges[uint]{}
+	authoritySetChanges.append(0, 5)
+	authoritySetChanges.append(1, 8)
+
+	proofOf6, err := proveFinality[
+		*BackendMock[string, uint, testHeader[string, uint],
+			*BlockchainBackendMock[string, uint, testHeader[string, uint]]],
+		string,
+		uint,
+		string,
+		dummyAuthID,
+		testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]],
+	](
+		mockBackend,
+		authoritySetChanges,
+		6,
+		true,
+	)
+	require.NoError(t, err)
+
+	unknownHeaders := []testHeader[string, uint]{header7, header8}
+	expFinalityProof := &FinalityProof[string, uint, testHeader[string, uint]]{
+		Block:          "hash8",
+		Justification:  encJust,
+		UnknownHeaders: unknownHeaders,
+	}
+	require.Equal(t, expFinalityProof, proofOf6)
+
+	mockBlockchain2 := NewBlockchainBackendMock[string, uint, testHeader[string, uint]](t)
+	mockBlockchain2.EXPECT().Info().Return(dummyInfo).Once()
+	mockBlockchain2.EXPECT().ExpectBlockHashFromID(uint(8)).Return("hash8", nil).Times(2)
+	mockBlockchain2.EXPECT().Justifications("hash8").Return(&justifications, nil).Times(1)
+
+	mockBackend2 := NewBackendMock[string, uint, testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]]](t)
+	mockBackend2.EXPECT().Blockchain().Return(mockBlockchain2).Times(4)
+
+	proofOf6WithoutUnknown, err := proveFinality[
+		*BackendMock[string, uint, testHeader[string, uint],
+			*BlockchainBackendMock[string, uint, testHeader[string, uint]]],
+		string,
+		uint,
+		string,
+		dummyAuthID,
+		testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]],
+	](
+		mockBackend2,
+		authoritySetChanges,
+		6,
+		false,
+	)
+	require.NoError(t, err)
+
+	expFinalityProof = &FinalityProof[string, uint, testHeader[string, uint]]{
+		Block:         "hash8",
+		Justification: encJust,
+	}
+	require.Equal(t, expFinalityProof, proofOf6WithoutUnknown)
+}
+
+func TestFinalityProof_InLastSetFailsWithoutLatest(t *testing.T) {
+	dummyInfo := Info[uint]{
+		FinalizedNumber: 8,
+	}
+	mockBlockchain := NewBlockchainBackendMock[string, uint, testHeader[string, uint]](t)
+	mockBlockchain.EXPECT().Info().Return(dummyInfo).Once()
+
+	mockBackend := NewBackendMock[string, uint, testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]]](t)
+	mockBackend.EXPECT().Blockchain().Return(mockBlockchain).Times(1)
+	mockBackend.EXPECT().Get(Key("grandpa_best_justification")).Return(nil, nil).Times(1)
+
+	// No recent authority set change, so we are in the latest set (authoritySetChangeIDLatest) and we will
+	// try to pick up the best stored justification, of which there is none in this case.
+	authoritySetChanges := AuthoritySetChanges[uint]{}
+	authoritySetChanges.append(0, 5)
+
+	proof, err := proveFinality[
+		*BackendMock[string, uint, testHeader[string, uint],
+			*BlockchainBackendMock[string, uint, testHeader[string, uint]]],
+		string,
+		uint,
+		string,
+		dummyAuthID,
+		testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]],
+	](
+		mockBackend,
+		authoritySetChanges,
+		6,
+		true,
+	)
+	// When justification is not stored in db, return nil
+	require.NoError(t, err)
+	require.Nil(t, proof)
+}
+
+func TestFinalityProof_InLastSetUsingLatestJustificationWorks(t *testing.T) {
+	ID := dummyAuthID(1)
+	header7 := testHeader[string, uint]{
+		NumberField: uint(7),
+		HashField:   "hash7",
+	}
+	header8 := testHeader[string, uint]{
+		NumberField:     uint(8),
+		HashField:       "hash8",
+		ParentHashField: "hash7",
+	}
+
+	dummyInfo := Info[uint]{
+		FinalizedNumber: 8,
+	}
+
+	round := uint64(8)
+	commit := createCommit(t, "hash8", uint(8), round, ID)
+	grandpaJust := GrandpaJustification[string, uint, string, dummyAuthID]{
+		Round:  round,
+		Commit: commit,
+	}
+
+	encJust, err := scale.Marshal(grandpaJust)
+	require.NoError(t, err)
+
+	mockBlockchain := NewBlockchainBackendMock[string, uint, testHeader[string, uint]](t)
+	mockBlockchain.EXPECT().Info().Return(dummyInfo).Once()
+	mockBlockchain.EXPECT().ExpectBlockHashFromID(uint(7)).Return("hash7", nil).Once()
+	mockBlockchain.EXPECT().ExpectHeader("hash7").Return(header7, nil).Once()
+	mockBlockchain.EXPECT().ExpectBlockHashFromID(uint(8)).Return("hash8", nil).Times(2)
+	mockBlockchain.EXPECT().ExpectHeader("hash8").Return(header8, nil).Once()
+
+	mockBackend := NewBackendMock[string, uint, testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]]](t)
+	mockBackend.EXPECT().Blockchain().Return(mockBlockchain).Times(6)
+	mockBackend.EXPECT().Get(Key("grandpa_best_justification")).Return(&encJust, nil).Times(1)
+
+	// No recent authority set change, so we are in the latest set (authoritySetChangeIDLatest) and will pick
+	// up the best stored justification (via the mocked Get call)
+	authoritySetChanges := AuthoritySetChanges[uint]{}
+	authoritySetChanges.append(0, 5)
+
+	proofOf6, err := proveFinality[
+		*BackendMock[string, uint, testHeader[string, uint],
+			*BlockchainBackendMock[string, uint, testHeader[string, uint]]],
+		string,
+		uint,
+		string,
+		dummyAuthID,
+		testHeader[string, uint],
+		*BlockchainBackendMock[string, uint, testHeader[string, uint]],
+	](
+		mockBackend,
+		authoritySetChanges,
+		6,
+		true,
+	)
+	require.NoError(t, err)
+
+	unknownHeaders := []testHeader[string, uint]{header7, header8}
+
+	expFinalityProof := &FinalityProof[string, uint, testHeader[string, uint]]{
+		Block:          "hash8",
+		Justification:  scale.MustMarshal(grandpaJust),
+		UnknownHeaders: unknownHeaders,
+	}
+	require.Equal(t, expFinalityProof, proofOf6)
+}
diff --git a/client/consensus/grandpa/helpers_test.go b/client/consensus/grandpa/helpers_test.go
new file mode 100644
index 0000000000..d329aaff0c
--- /dev/null
+++ b/client/consensus/grandpa/helpers_test.go
@@ -0,0 +1,50 @@
+// Copyright 2023 ChainSafe Systems (ON)
+// SPDX-License-Identifier: LGPL-3.0-only
+
+package grandpa
+
+import (
+	"golang.org/x/exp/constraints"
+)
+
+// Fulfils Header interface
+type testHeader[Hash constraints.Ordered, N constraints.Unsigned] struct {
+	ParentHashField Hash
+	NumberField     N
+	StateRoot       Hash
+	ExtrinsicsRoot  Hash
+	HashField       Hash
+}
+
+func (s testHeader[Hash, N]) ParentHash() Hash {
+	return s.ParentHashField
+}
+
+func (s testHeader[Hash, N]) Hash() Hash {
+	return s.HashField
+}
+
+func (s testHeader[Hash, N]) Number() N {
+	return s.NumberField
+}
+
+// Fulfils HeaderBackend interface
+type testHeaderBackend[Hash constraints.Ordered, N constraints.Unsigned] struct {
+	header *Header[Hash, N]
+}
+
+func (backend testHeaderBackend[Hash, N]) Header(hash Hash) (*Header[Hash, N], error) {
+	return backend.header, nil
+}
+
+func (backend testHeaderBackend[Hash, N]) Info() Info[N] {
+	panic("unimplemented")
+}
+
+func (backend testHeaderBackend[Hash, N]) ExpectBlockHashFromID(id N) (Hash, error) {
+	panic("unimplemented")
+}
+
+func (backend testHeaderBackend[Hash, N]) ExpectHeader(hash Hash) (Header[Hash, N], error) {
+	panic("unimplemented")
+}
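[Sketch, not in this diff] One way to keep these helpers honest is a pair of compile-time assertions that the test doubles satisfy the interfaces they stand in for:

var (
	_ Header[string, uint]                              = testHeader[string, uint]{}
	_ HeaderBackend[string, uint, Header[string, uint]] = testHeaderBackend[string, uint]{}
)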
diff --git a/client/consensus/grandpa/interfaces.go b/client/consensus/grandpa/interfaces.go
index b772a26efa..9d4f57dcaa 100644
--- a/client/consensus/grandpa/interfaces.go
+++ b/client/consensus/grandpa/interfaces.go
@@ -10,13 +10,134 @@ import (
 // Telemetry TODO issue #3474
 type Telemetry interface{}
 
+/*
+	Following is from api/backend
+*/
+
+type Key []byte
+
+type KeyValue struct {
+	Key   Key
+	Value []byte
+}
+
+// AuxStore is part of the substrate backend.
+// Provides access to an auxiliary database.
+//
+// This is a simple global database not aware of forks. Can be used for storing auxiliary
+// information like total block weight/difficulty for fork resolution purposes as a common use
+// case.
+// TODO should this just be in Backend?
+type AuxStore interface {
+	// Insert inserts auxiliary data into the key-value store.
+	//
+	// Deletions occur after insertions.
+	Insert(insert []KeyValue, delete []Key) error
+	// Get queries auxiliary data from the key-value store.
+	Get(key Key) (*[]byte, error)
+}
+
+// Backend Client backend.
+//
+// Manages the data layer.
+//
+// # State Pruning
+//
+// While an object from `state_at` is alive, the state
+// should not be pruned. The backend should internally reference-count
+// its state objects.
+//
+// The same applies for live `BlockImportOperation`s: while an import operation building on a
+// parent `P` is alive, the state for `P` should not be pruned.
+//
+// # Block Pruning
+//
+// Users can pin blocks in memory by calling `pin_block`. When
+// a block would be pruned, its value is kept in an in-memory cache
+// until it is unpinned via `unpin_block`.
+//
+// While a block is pinned, its state is also preserved.
+//
+// The backend should internally reference count the number of pin / unpin calls.
+type Backend[
+	Hash constraints.Ordered,
+	N constraints.Unsigned,
+	H Header[Hash, N],
+	B BlockchainBackend[Hash, N, H]] interface {
+	AuxStore
+	Blockchain() B
+}
+
+/*
+	Following is from primitives/blockchain
+*/
+
+// HeaderBackend Blockchain database header backend. Does not perform any validation.
+// primitives/blockchains/src/backend
+type HeaderBackend[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] interface {
+	// Header Get block header. Returns None if block is not found.
+	Header(hash Hash) (*H, error)
+	// Info Get blockchain info.
+	Info() Info[N]
+	// ExpectBlockHashFromID returns the hash for the given block ID (upstream a BlockID enum; here just the number N)
+	ExpectBlockHashFromID(id N) (Hash, error)
+	// ExpectHeader returns the header for the given hash
+	ExpectHeader(hash Hash) (H, error)
+}
+
+// Info HeaderBackend blockchain info
+type Info[N constraints.Unsigned] struct {
+	FinalizedNumber N
+}
+
+// BlockchainBackend Blockchain database backend. Does not perform any validation.
+// pub trait Backend<Block: BlockT>: HeaderBackend<Block> + HeaderMetadata<Block, Error = Error>
+// primitives/blockchains/src/backend
+type BlockchainBackend[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] interface {
+	HeaderBackend[Hash, N, H]
+	Justifications(hash Hash) (*Justifications, error)
+}
+
+/*
+	Following is from primitives/runtime
+*/
+
+// Header is the interface for a block header
 type Header[Hash constraints.Ordered, N constraints.Unsigned] interface {
 	ParentHash() Hash
 	Hash() Hash
 	Number() N
 }
 
-type HeaderBackend[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] interface {
-	// Header Get block header. Returns None if block is not found.
-	Header(Hash) (*H, error)
+// ConsensusEngineID ID for consensus engine
+type ConsensusEngineID [4]byte
+
+// Justification An abstraction over justification for a block's validity under a consensus algorithm.
+//
+// Essentially a finality proof. The exact formulation will vary between consensus
+// algorithms. In the case where there are multiple valid proofs, inclusion within
+// the block itself would allow swapping justifications to change the block's hash
+// (and thus fork the chain). Sending a `Justification` alongside a block instead
+// bypasses this problem.
+//
+// Each justification is provided as an encoded blob, and is tagged with an ID
+// to identify the consensus engine that generated the proof (we might have
+// multiple justifications from different engines for the same block).
+type Justification struct {
+	EngineID             ConsensusEngineID
+	EncodedJustification []byte
+}
+
+// Justifications slice of justifications
+type Justifications []Justification
+
+// IntoJustification Return the encoded justification for the given consensus
+// engine, if it exists
+func (j Justifications) IntoJustification(engineID ConsensusEngineID) *[]byte {
+	for _, justification := range j {
+		if justification.EngineID == engineID {
+			return &justification.EncodedJustification
+		}
+	}
+	return nil
 }
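[Usage sketch, not part of this diff] Given a block's justifications, the new helper picks out the GRANDPA one by engine ID; a nil result means no GRANDPA justification was stored for that block. The grandpaJustificationFor wrapper name is hypothetical.

func grandpaJustificationFor(justs Justifications) ([]byte, bool) {
	enc := justs.IntoJustification(GrandpaEngineID)
	if enc == nil {
		return nil, false
	}
	return *enc, true
}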
diff --git a/client/consensus/grandpa/justification.go b/client/consensus/grandpa/justification.go
index 31473e4c6e..21f4223479 100644
--- a/client/consensus/grandpa/justification.go
+++ b/client/consensus/grandpa/justification.go
@@ -6,6 +6,7 @@ package grandpa
 import (
 	"errors"
 	"fmt"
+	"io"
 	"reflect"
 
 	finalityGrandpa "github.com/ChainSafe/gossamer/pkg/finality-grandpa"
@@ -19,7 +20,7 @@ var (
 	errBlockNotDescendentOfBase = errors.New("block not descendent of base")
 )
 
-// Justification is a GRANDPA justification for block finality, it includes a commit message and
+// GrandpaJustification is a GRANDPA justification for block finality, it includes a commit message and
 // an ancestry proof including all headers routing all precommit target blocks
 // to the commit target block. Due to the current voting strategy the precommit
 // targets should be the same as the commit target, since honest voters don't
@@ -27,15 +28,66 @@ var (
 //
 // This is meant to be stored in the db and passed around the network to other
 // nodes, and are used by syncing nodes to prove authority set handoffs.
-type Justification[
+type GrandpaJustification[
 	Hash constraints.Ordered,
 	N constraints.Unsigned,
 	S comparable,
 	ID AuthorityID,
-	H Header[Hash, N]] struct {
+] struct {
 	Round           uint64
 	Commit          finalityGrandpa.Commit[Hash, N, S, ID]
-	VotesAncestries []H
+	VotesAncestries []Header[Hash, N]
+}
+
+// Type used for decoding grandpa justifications (can pass in generic Header type)
+type decodeGrandpaJustification[
+	Hash constraints.Ordered,
+	N constraints.Unsigned,
+	S comparable,
+	ID AuthorityID,
+	H Header[Hash, N],
+] GrandpaJustification[Hash, N, S, ID]
+
+func decodeJustification[Hash constraints.Ordered,
+	N constraints.Unsigned,
+	S comparable,
+	ID AuthorityID,
+	H Header[Hash, N],
+](encodedJustification []byte) (*GrandpaJustification[Hash, N, S, ID], error) {
+	newJustification := decodeGrandpaJustification[Hash, N, S, ID, H]{}
+	err := scale.Unmarshal(encodedJustification, &newJustification)
+	if err != nil {
+		return nil, err
+	}
+	return newJustification.GrandpaJustification(), nil
+}
+
+func (dgj *decodeGrandpaJustification[Hash, N, S, ID, H]) UnmarshalSCALE(reader io.Reader) (err error) {
+	type roundCommitHeader struct {
+		Round   uint64
+		Commit  finalityGrandpa.Commit[Hash, N, S, ID]
+		Headers []H
+	}
+	rch := roundCommitHeader{}
+	decoder := scale.NewDecoder(reader)
+	err = decoder.Decode(&rch)
+	if err != nil {
+		return
+	}
+
+	dgj.Round = rch.Round
+	dgj.Commit = rch.Commit
+	dgj.VotesAncestries = make([]Header[Hash, N], len(rch.Headers))
+	for i, header := range rch.Headers {
+		dgj.VotesAncestries[i] = header
+	}
+	return
+}
+
+func (dgj decodeGrandpaJustification[Hash, N, S, ID, H]) GrandpaJustification() *GrandpaJustification[Hash, N, S, ID] {
+	return &GrandpaJustification[Hash, N, S, ID]{
+		dgj.Round, dgj.Commit, dgj.VotesAncestries,
+	}
 }
 
 // NewJustificationFromCommit Create a GRANDPA justification from the given commit. This method
@@ -45,12 +97,12 @@ func NewJustificationFromCommit[
 	N constraints.Unsigned,
 	S comparable,
 	ID AuthorityID,
-	H Header[Hash, N]](
-	client HeaderBackend[Hash, N, H],
+](
+	client HeaderBackend[Hash, N, Header[Hash, N]],
 	round uint64,
-	commit finalityGrandpa.Commit[Hash, N, S, ID]) (Justification[Hash, N, S, ID, H], error) {
+	commit finalityGrandpa.Commit[Hash, N, S, ID]) (GrandpaJustification[Hash, N, S, ID], error) {
 	votesAncestriesHashes := make(map[Hash]struct{})
-	voteAncestries := make([]H, 0)
+	voteAncestries := make([]Header[Hash, N], 0)
 
 	// we pick the precommit for the lowest block as the base that
 	// should serve as the root block for populating ancestry (i.e.
@@ -71,7 +123,7 @@ func NewJustificationFromCommit[
 		}
 	}
 	if minPrecommit == nil {
-		return Justification[Hash, N, S, ID, H]{},
+		return GrandpaJustification[Hash, N, S, ID]{},
 			fmt.Errorf("%w: invalid precommits for target commit", errBadJustification)
 	}
 
@@ -86,7 +138,7 @@ func NewJustificationFromCommit[
 
 			header, err := client.Header(currentHash)
 			if err != nil || header == nil {
-				return Justification[Hash, N, S, ID, H]{},
+				return GrandpaJustification[Hash, N, S, ID]{},
 					fmt.Errorf("%w: invalid precommits for target commit", errBadJustification)
 			}
 
@@ -96,7 +148,7 @@ func NewJustificationFromCommit[
 			// as base and only traverse backwards from the other blocks
 			// in the commit. but better be safe to avoid an unbound loop.
 			if currentHeader.Number() <= baseNumber {
-				return Justification[Hash, N, S, ID, H]{},
+				return GrandpaJustification[Hash, N, S, ID]{},
 					fmt.Errorf("%w: invalid precommits for target commit", errBadJustification)
 			}
 			parentHash := currentHeader.ParentHash()
@@ -111,7 +163,7 @@ func NewJustificationFromCommit[
 		}
 	}
 
-	return Justification[Hash, N, S, ID, H]{
+	return GrandpaJustification[Hash, N, S, ID]{
 		Round:           round,
 		Commit:          commit,
 		VotesAncestries: voteAncestries,
@@ -124,17 +176,15 @@ func decodeAndVerifyFinalizes[Hash constraints.Ordered,
 	N constraints.Unsigned,
 	S comparable,
 	ID AuthorityID,
-	H Header[Hash, N]](
+	H Header[Hash, N],
+](
 	encoded []byte,
 	finalizedTarget hashNumber[Hash, N],
 	setID uint64,
-	voters finalityGrandpa.VoterSet[ID]) (Justification[Hash, N, S, ID, H], error) {
-	justification := Justification[Hash, N, S, ID, H]{
-		VotesAncestries: make([]H, 0),
-	}
-	err := scale.Unmarshal(encoded, &justification)
+	voters finalityGrandpa.VoterSet[ID]) (GrandpaJustification[Hash, N, S, ID], error) {
+	justification, err := decodeJustification[Hash, N, S, ID, H](encoded)
 	if err != nil {
-		return Justification[Hash, N, S, ID, H]{}, fmt.Errorf("error decoding justification for header: %s", err)
+		return GrandpaJustification[Hash, N, S, ID]{}, fmt.Errorf("error decoding justification for header: %s", err)
 	}
 
 	decodedTarget := hashNumber[Hash, N]{
@@ -143,14 +193,14 @@ func decodeAndVerifyFinalizes[Hash constraints.Ordered,
 	}
 
 	if decodedTarget != finalizedTarget {
-		return Justification[Hash, N, S, ID, H]{}, fmt.Errorf("invalid commit target in grandpa justification")
+		return GrandpaJustification[Hash, N, S, ID]{}, fmt.Errorf("invalid commit target in grandpa justification")
 	}
 
-	return justification, justification.verifyWithVoterSet(setID, voters)
+	return *justification, justification.verifyWithVoterSet(setID, voters)
 }
 
 // Verify Validate the commit and the votes' ancestry proofs.
-func (j *Justification[Hash, N, S, ID, H]) Verify(setID uint64, authorities AuthorityList[ID]) error {
+func (j *GrandpaJustification[Hash, N, S, ID]) Verify(setID uint64, authorities AuthorityList[ID]) error {
 	var weights []finalityGrandpa.IDWeight[ID]
 	for _, authority := range authorities {
 		weight := finalityGrandpa.IDWeight[ID]{
@@ -169,7 +219,7 @@ func (j *Justification[Hash, N, S, ID, H]) Verify(setID uint64, authorities Auth
 }
 
 // Validate the commit and the votes' ancestry proofs.
-func (j *Justification[Hash, N, S, ID, H]) verifyWithVoterSet(
+func (j *GrandpaJustification[Hash, N, S, ID]) verifyWithVoterSet(
 	setID uint64,
 	voters finalityGrandpa.VoterSet[ID]) error {
 	ancestryChain := newAncestryChain[Hash, N](j.VotesAncestries)
@@ -253,7 +303,7 @@ func (j *Justification[Hash, N, S, ID, H]) verifyWithVoterSet(
 }
 
 // Target The target block NumberField and HashField that this justifications proves finality for
-func (j *Justification[Hash, N, S, ID, H]) Target() hashNumber[Hash, N] {
+func (j *GrandpaJustification[Hash, N, S, ID]) Target() hashNumber[Hash, N] {
 	return hashNumber[Hash, N]{
 		number: j.Commit.TargetNumber,
 		hash:   j.Commit.TargetHash,
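[Round-trip sketch, not part of this diff] Mirroring TestJustificationEncoding below: GrandpaJustification is marshalled directly, but unmarshalling goes through decodeGrandpaJustification so a concrete header type (here the testHeader test double) can back the VotesAncestries interface slice. The roundTripJustification helper is hypothetical.

func roundTripJustification(
	j GrandpaJustification[string, uint, string, dummyAuthID],
) (*GrandpaJustification[string, uint, string, dummyAuthID], error) {
	enc, err := scale.Marshal(j)
	if err != nil {
		return nil, err
	}
	// the final type parameter supplies the concrete header type used during decoding
	return decodeJustification[string, uint, string, dummyAuthID, testHeader[string, uint]](enc)
}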
diff --git a/client/consensus/grandpa/justification_test.go b/client/consensus/grandpa/justification_test.go
index f860de1ebd..3f1e3f62a5 100644
--- a/client/consensus/grandpa/justification_test.go
+++ b/client/consensus/grandpa/justification_test.go
@@ -14,36 +14,6 @@ import (
 	"golang.org/x/exp/constraints"
 )
 
-// Fulfils Header interface
-type testHeader[H constraints.Ordered, N constraints.Unsigned] struct {
-	ParentHashField H
-	NumberField     N
-	StateRoot       H
-	ExtrinsicsRoot  H
-	HashField       H
-}
-
-func (s testHeader[H, N]) ParentHash() H {
-	return s.ParentHashField
-}
-
-func (s testHeader[H, N]) Hash() H {
-	return s.HashField
-}
-
-func (s testHeader[H, N]) Number() N {
-	return s.NumberField
-}
-
-// Fulfils HeaderBackend interface
-type testBackend[H constraints.Ordered, N constraints.Unsigned, Header testHeader[H, N]] struct {
-	header *testHeader[H, N]
-}
-
-func (backend testBackend[H, N, Header]) Header(hash H) (*testHeader[H, N], error) {
-	return backend.header, nil
-}
-
 func makePrecommit(t *testing.T,
 	targetHash string,
 	targetNumber uint, id dummyAuthID) finalityGrandpa.SignedPrecommit[string, uint, string, dummyAuthID] {
@@ -62,13 +32,13 @@ func TestJustificationEncoding(t *testing.T) {
 	precommit := makePrecommit(t, "a", 1, 1)
 	precommits = append(precommits, precommit)
 
-	expAncestries := make([]testHeader[string, uint], 0)
+	expAncestries := make([]Header[string, uint], 0)
 	expAncestries = append(expAncestries, testHeader[string, uint]{
 		NumberField:     100,
 		ParentHashField: "a",
 	})
 
-	justification := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{
+	justification := GrandpaJustification[string, uint, string, dummyAuthID]{
 		Round: 2,
 		Commit: finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
 			TargetHash:   "a",
@@ -81,16 +51,21 @@ func TestJustificationEncoding(t *testing.T) {
 	encodedJustification, err := scale.Marshal(justification)
 	require.NoError(t, err)
 
-	newJustificaiton := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{}
-	err = scale.Unmarshal(encodedJustification, &newJustificaiton)
+	newJustification, err := decodeJustification[
+		string,
+		uint,
+		string,
+		dummyAuthID,
+		testHeader[string, uint],
+	](encodedJustification)
 	require.NoError(t, err)
-	require.Equal(t, justification, newJustificaiton)
+	require.Equal(t, justification, *newJustification)
 }
 
 func TestJustification_fromCommit(t *testing.T) {
 	commit := finalityGrandpa.Commit[string, uint, string, dummyAuthID]{}
-	client := testBackend[string, uint, testHeader[string, uint]]{}
-	_, err := NewJustificationFromCommit[string, uint, string, dummyAuthID, testHeader[string, uint]](client, 2, commit)
+	client := testHeaderBackend[string, uint]{}
+	_, err := NewJustificationFromCommit[string, uint, string, dummyAuthID](client, 2, commit)
 	require.NotNil(t, err)
 	require.ErrorIs(t, err, errBadJustification)
 	require.Equal(t, "bad justification for header: invalid precommits for target commit", err.Error())
@@ -109,9 +84,9 @@ func TestJustification_fromCommit(t *testing.T) {
 		Precommits:   precommits,
 	}
 
-	clientNil := testBackend[string, uint, testHeader[string, uint]]{}
+	clientNil := testHeaderBackend[string, uint]{}
 
-	_, err = NewJustificationFromCommit[string, uint, string, dummyAuthID, testHeader[string, uint]](
+	_, err = NewJustificationFromCommit[string, uint, string, dummyAuthID](
 		clientNil,
 		2,
 		validCommit,
@@ -121,7 +96,7 @@ func TestJustification_fromCommit(t *testing.T) {
 	require.Equal(t, "bad justification for header: invalid precommits for target commit", err.Error())
 
 	// currentHeader.Number() <= baseNumber
-	_, err = NewJustificationFromCommit[string, uint, string, dummyAuthID, testHeader[string, uint]](
+	_, err = NewJustificationFromCommit[string, uint, string, dummyAuthID](
 		client,
 		2,
 		validCommit,
@@ -131,18 +106,19 @@ func TestJustification_fromCommit(t *testing.T) {
 	require.Equal(t, "bad justification for header: invalid precommits for target commit", err.Error())
 
 	// happy path
-	clientLargeNum := testBackend[string, uint, testHeader[string, uint]]{
-		header: &testHeader[string, uint]{
-			NumberField:     100,
-			ParentHashField: "a",
-		},
+	header := Header[string, uint](testHeader[string, uint]{
+		NumberField:     100,
+		ParentHashField: "a",
+	})
+	clientLargeNum := testHeaderBackend[string, uint]{
+		header: &header,
 	}
-	expAncestries := make([]testHeader[string, uint], 0)
+	expAncestries := make([]Header[string, uint], 0)
 	expAncestries = append(expAncestries, testHeader[string, uint]{
 		NumberField:     100,
 		ParentHashField: "a",
 	})
-	expJustification := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{
+	expJustification := GrandpaJustification[string, uint, string, dummyAuthID]{
 		Round: 2,
 		Commit: finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
 			TargetHash:   "a",
@@ -151,7 +127,7 @@ func TestJustification_fromCommit(t *testing.T) {
 		},
 		VotesAncestries: expAncestries,
 	}
-	justification, err := NewJustificationFromCommit[string, uint, string, dummyAuthID, testHeader[string, uint]](
+	justification, err := NewJustificationFromCommit[string, uint, string, dummyAuthID](
 		clientLargeNum,
 		2,
 		validCommit)
@@ -170,7 +146,7 @@ func TestJustification_decodeAndVerifyFinalizes(t *testing.T) {
 	require.NotNil(t, err)
 
 	// Invalid target
-	justification := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{
+	justification := GrandpaJustification[string, uint, string, dummyAuthID]{
 		Commit: finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
 			TargetHash:   "a",
 			TargetNumber: 1,
@@ -188,12 +164,12 @@ func TestJustification_decodeAndVerifyFinalizes(t *testing.T) {
 	require.Equal(t, "invalid commit target in grandpa justification", err.Error())
 
 	// Happy path
-	headerB := testHeader[string, uint]{
+	headerB := Header[string, uint](testHeader[string, uint]{
 		HashField:       "b",
 		ParentHashField: "a",
-	}
+	})
 
-	headerList := []testHeader[string, uint]{
+	headerList := []Header[string, uint]{
 		headerB,
 	}
 
@@ -207,7 +183,7 @@ func TestJustification_decodeAndVerifyFinalizes(t *testing.T) {
 	precommit = makePrecommit(t, "b", 2, 3)
 	precommits = append(precommits, precommit)
 
-	validJustification := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{
+	validJustification := GrandpaJustification[string, uint, string, dummyAuthID]{
 		Commit: finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
 			TargetHash:   "a",
 			TargetNumber: 1,
@@ -242,7 +218,7 @@ func TestJustification_decodeAndVerifyFinalizes(t *testing.T) {
 func TestJustification_verify(t *testing.T) {
 	// Nil voter case
 	auths := make(AuthorityList[dummyAuthID], 0)
-	justification := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{}
+	justification := GrandpaJustification[string, uint, string, dummyAuthID]{}
 	err := justification.Verify(2, auths)
 	require.ErrorIs(t, err, errInvalidAuthoritiesSet)
 
@@ -254,12 +230,12 @@ func TestJustification_verify(t *testing.T) {
 		})
 	}
 
-	headerB := testHeader[string, uint]{
+	headerB := Header[string, uint](testHeader[string, uint]{
 		HashField:       "b",
 		ParentHashField: "a",
-	}
+	})
 
-	headerList := []testHeader[string, uint]{
+	headerList := []Header[string, uint]{
 		headerB,
 	}
 
@@ -273,7 +249,7 @@ func TestJustification_verify(t *testing.T) {
 	precommit = makePrecommit(t, "b", 2, 3)
 	precommits = append(precommits, precommit)
 
-	validJustification := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{
+	validJustification := GrandpaJustification[string, uint, string, dummyAuthID]{
 		Commit: finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
 			TargetHash:   "a",
 			TargetNumber: 1,
@@ -294,7 +270,7 @@ func TestJustification_verifyWithVoterSet(t *testing.T) {
 	}
 	voters := finalityGrandpa.NewVoterSet(IDWeights)
 
-	invalidJustification := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{
+	invalidJustification := GrandpaJustification[string, uint, string, dummyAuthID]{
 		Commit: finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
 			TargetHash:   "B",
 			TargetNumber: 2,
@@ -307,16 +283,16 @@ func TestJustification_verifyWithVoterSet(t *testing.T) {
 	require.Equal(t, err.Error(), "bad justification for header: invalid commit in grandpa justification")
 
 	// 2) visitedHashes != ancestryHashes
-	headerA := testHeader[string, uint]{
+	headerA := Header[string, uint](testHeader[string, uint]{
 		HashField: "a",
-	}
+	})
 
-	headerB := testHeader[string, uint]{
+	headerB := Header[string, uint](testHeader[string, uint]{
 		HashField:       "b",
 		ParentHashField: "a",
-	}
+	})
 
-	headerList := []testHeader[string, uint]{
+	headerList := []Header[string, uint]{
 		headerA,
 		headerB,
 	}
@@ -331,7 +307,7 @@ func TestJustification_verifyWithVoterSet(t *testing.T) {
 	precommit = makePrecommit(t, "b", 2, 3)
 	precommits = append(precommits, precommit)
 
-	validJustification := Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{
+	validJustification := GrandpaJustification[string, uint, string, dummyAuthID]{
 		Commit: finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
 			TargetHash:   "a",
 			TargetNumber: 1,
@@ -346,11 +322,11 @@ func TestJustification_verifyWithVoterSet(t *testing.T) {
 		"invalid precommit ancestries in grandpa justification with unused headers")
 
 	// Valid case
-	headerList = []testHeader[string, uint]{
+	headerList = []Header[string, uint]{
 		headerB,
 	}
 
-	validJustification = Justification[string, uint, string, dummyAuthID, testHeader[string, uint]]{
+	validJustification = GrandpaJustification[string, uint, string, dummyAuthID]{
 		Commit: finalityGrandpa.Commit[string, uint, string, dummyAuthID]{
 			TargetHash:   "a",
 			TargetNumber: 1,
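These tests now build headers as Header[string, uint] interface values and hand NewJustificationFromCommit a testHeaderBackend stub whose definition is not shown in this section. A hypothetical reconstruction of the shape implied by the call sites above (field name, receivers, and return values are all assumptions; fmt and constraints imports are taken from the existing test files) might look like this:

```go
// testHeaderBackend (hypothetical reconstruction) stores a single header and
// serves it for every lookup, mirroring how the tests use it above.
type testHeaderBackend[Hash constraints.Ordered, N constraints.Unsigned] struct {
	header *Header[Hash, N]
}

func (b testHeaderBackend[Hash, N]) Header(hash Hash) (*Header[Hash, N], error) {
	return b.header, nil
}

func (b testHeaderBackend[Hash, N]) Info() Info[N] {
	// Assumes Info is a plain struct with a usable zero value.
	return Info[N]{}
}

func (b testHeaderBackend[Hash, N]) ExpectHeader(hash Hash) (Header[Hash, N], error) {
	if b.header == nil {
		return nil, fmt.Errorf("header %v not found", hash)
	}
	return *b.header, nil
}

func (b testHeaderBackend[Hash, N]) ExpectBlockHashFromID(id N) (Hash, error) {
	var zero Hash
	if b.header == nil {
		return zero, fmt.Errorf("block %d not found", id)
	}
	return (*b.header).Hash(), nil
}
```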
diff --git a/client/consensus/grandpa/lib.go b/client/consensus/grandpa/lib.go
index f90c872857..b2332719d4 100644
--- a/client/consensus/grandpa/lib.go
+++ b/client/consensus/grandpa/lib.go
@@ -13,6 +13,9 @@ import (
 
 var logger = log.NewFromGlobal(log.AddContext("consensus", "grandpa"))
 
+// GrandpaEngineID is the hard-coded GRANDPA consensus engine ID
+var GrandpaEngineID = ConsensusEngineID{'F', 'R', 'N', 'K'}
+
 type AuthorityID interface {
 	constraints.Ordered
 	Verify(msg []byte, sig []byte) (bool, error)
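GrandpaEngineID is the four-character identifier ("FRNK") used to tag GRANDPA consensus data. A hedged sketch of how it might be consumed, assuming ConsensusEngineID (defined outside this hunk) is a comparable four-byte array:

```go
// isGrandpaEngine reports whether an engine ID identifies GRANDPA data.
// Assumption: ConsensusEngineID is a comparable array type, so plain
// equality against GrandpaEngineID is sufficient.
func isGrandpaEngine(id ConsensusEngineID) bool {
	return id == GrandpaEngineID
}
```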
diff --git a/client/consensus/grandpa/mocks_backend_test.go b/client/consensus/grandpa/mocks_backend_test.go
new file mode 100644
index 0000000000..55128dde61
--- /dev/null
+++ b/client/consensus/grandpa/mocks_backend_test.go
@@ -0,0 +1,173 @@
+// Code generated by mockery v2.36.1. DO NOT EDIT.
+
+package grandpa
+
+import (
+	mock "github.com/stretchr/testify/mock"
+	constraints "golang.org/x/exp/constraints"
+)
+
+// BackendMock is an autogenerated mock type for the Backend type
+type BackendMock[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N], B BlockchainBackend[Hash, N, H]] struct {
+	mock.Mock
+}
+
+type BackendMock_Expecter[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N], B BlockchainBackend[Hash, N, H]] struct {
+	mock *mock.Mock
+}
+
+func (_m *BackendMock[Hash, N, H, B]) EXPECT() *BackendMock_Expecter[Hash, N, H, B] {
+	return &BackendMock_Expecter[Hash, N, H, B]{mock: &_m.Mock}
+}
+
+// Blockchain provides a mock function with given fields:
+func (_m *BackendMock[Hash, N, H, B]) Blockchain() B {
+	ret := _m.Called()
+
+	var r0 B
+	if rf, ok := ret.Get(0).(func() B); ok {
+		r0 = rf()
+	} else {
+		r0 = ret.Get(0).(B)
+	}
+
+	return r0
+}
+
+// BackendMock_Blockchain_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Blockchain'
+type BackendMock_Blockchain_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N], B BlockchainBackend[Hash, N, H]] struct {
+	*mock.Call
+}
+
+// Blockchain is a helper method to define mock.On call
+func (_e *BackendMock_Expecter[Hash, N, H, B]) Blockchain() *BackendMock_Blockchain_Call[Hash, N, H, B] {
+	return &BackendMock_Blockchain_Call[Hash, N, H, B]{Call: _e.mock.On("Blockchain")}
+}
+
+func (_c *BackendMock_Blockchain_Call[Hash, N, H, B]) Run(run func()) *BackendMock_Blockchain_Call[Hash, N, H, B] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run()
+	})
+	return _c
+}
+
+func (_c *BackendMock_Blockchain_Call[Hash, N, H, B]) Return(_a0 B) *BackendMock_Blockchain_Call[Hash, N, H, B] {
+	_c.Call.Return(_a0)
+	return _c
+}
+
+func (_c *BackendMock_Blockchain_Call[Hash, N, H, B]) RunAndReturn(run func() B) *BackendMock_Blockchain_Call[Hash, N, H, B] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// Get provides a mock function with given fields: key
+func (_m *BackendMock[Hash, N, H, B]) Get(key Key) (*[]byte, error) {
+	ret := _m.Called(key)
+
+	var r0 *[]byte
+	var r1 error
+	if rf, ok := ret.Get(0).(func(Key) (*[]byte, error)); ok {
+		return rf(key)
+	}
+	if rf, ok := ret.Get(0).(func(Key) *[]byte); ok {
+		r0 = rf(key)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*[]byte)
+		}
+	}
+
+	if rf, ok := ret.Get(1).(func(Key) error); ok {
+		r1 = rf(key)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// BackendMock_Get_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Get'
+type BackendMock_Get_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N], B BlockchainBackend[Hash, N, H]] struct {
+	*mock.Call
+}
+
+// Get is a helper method to define mock.On call
+//   - key Key
+func (_e *BackendMock_Expecter[Hash, N, H, B]) Get(key interface{}) *BackendMock_Get_Call[Hash, N, H, B] {
+	return &BackendMock_Get_Call[Hash, N, H, B]{Call: _e.mock.On("Get", key)}
+}
+
+func (_c *BackendMock_Get_Call[Hash, N, H, B]) Run(run func(key Key)) *BackendMock_Get_Call[Hash, N, H, B] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].(Key))
+	})
+	return _c
+}
+
+func (_c *BackendMock_Get_Call[Hash, N, H, B]) Return(_a0 *[]byte, _a1 error) *BackendMock_Get_Call[Hash, N, H, B] {
+	_c.Call.Return(_a0, _a1)
+	return _c
+}
+
+func (_c *BackendMock_Get_Call[Hash, N, H, B]) RunAndReturn(run func(Key) (*[]byte, error)) *BackendMock_Get_Call[Hash, N, H, B] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// Insert provides a mock function with given fields: insert, delete
+func (_m *BackendMock[Hash, N, H, B]) Insert(insert []KeyValue, delete []Key) error {
+	ret := _m.Called(insert, delete)
+
+	var r0 error
+	if rf, ok := ret.Get(0).(func([]KeyValue, []Key) error); ok {
+		r0 = rf(insert, delete)
+	} else {
+		r0 = ret.Error(0)
+	}
+
+	return r0
+}
+
+// BackendMock_Insert_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Insert'
+type BackendMock_Insert_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N], B BlockchainBackend[Hash, N, H]] struct {
+	*mock.Call
+}
+
+// Insert is a helper method to define mock.On call
+//   - insert []KeyValue
+//   - delete []Key
+func (_e *BackendMock_Expecter[Hash, N, H, B]) Insert(insert interface{}, delete interface{}) *BackendMock_Insert_Call[Hash, N, H, B] {
+	return &BackendMock_Insert_Call[Hash, N, H, B]{Call: _e.mock.On("Insert", insert, delete)}
+}
+
+func (_c *BackendMock_Insert_Call[Hash, N, H, B]) Run(run func(insert []KeyValue, delete []Key)) *BackendMock_Insert_Call[Hash, N, H, B] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].([]KeyValue), args[1].([]Key))
+	})
+	return _c
+}
+
+func (_c *BackendMock_Insert_Call[Hash, N, H, B]) Return(_a0 error) *BackendMock_Insert_Call[Hash, N, H, B] {
+	_c.Call.Return(_a0)
+	return _c
+}
+
+func (_c *BackendMock_Insert_Call[Hash, N, H, B]) RunAndReturn(run func([]KeyValue, []Key) error) *BackendMock_Insert_Call[Hash, N, H, B] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// NewBackendMock creates a new instance of BackendMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+// The first argument is typically a *testing.T value.
+func NewBackendMock[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N], B BlockchainBackend[Hash, N, H]](t interface {
+	mock.TestingT
+	Cleanup(func())
+}) *BackendMock[Hash, N, H, B] {
+	mock := &BackendMock[Hash, N, H, B]{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
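A usage sketch for the generated BackendMock, assuming it sits alongside this package's tests with the usual testing and testify imports; the concrete type arguments are illustrative, not prescribed by the diff.

```go
func exampleBackendMock(t *testing.T) {
	blockchain := NewBlockchainBackendMock[string, uint, Header[string, uint]](t)
	backend := NewBackendMock[
		string, uint, Header[string, uint],
		*BlockchainBackendMock[string, uint, Header[string, uint]],
	](t)

	// Expectations are declared through the typed expecter API generated above.
	backend.EXPECT().Blockchain().Return(blockchain)
	backend.EXPECT().Insert(mock.Anything, mock.Anything).Return(nil)

	// The mock stands in for the Backend interface it was generated from.
	_ = backend.Blockchain()
	err := backend.Insert(nil, nil)
	require.NoError(t, err)
}
```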
diff --git a/client/consensus/grandpa/mocks_blockchainBackend_test.go b/client/consensus/grandpa/mocks_blockchainBackend_test.go
new file mode 100644
index 0000000000..6f05978f71
--- /dev/null
+++ b/client/consensus/grandpa/mocks_blockchainBackend_test.go
@@ -0,0 +1,288 @@
+// Code generated by mockery v2.36.1. DO NOT EDIT.
+
+package grandpa
+
+import (
+	mock "github.com/stretchr/testify/mock"
+	constraints "golang.org/x/exp/constraints"
+)
+
+// BlockchainBackendMock is an autogenerated mock type for the BlockchainBackend type
+type BlockchainBackendMock[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	mock.Mock
+}
+
+type BlockchainBackendMock_Expecter[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	mock *mock.Mock
+}
+
+func (_m *BlockchainBackendMock[Hash, N, H]) EXPECT() *BlockchainBackendMock_Expecter[Hash, N, H] {
+	return &BlockchainBackendMock_Expecter[Hash, N, H]{mock: &_m.Mock}
+}
+
+// ExpectBlockHashFromID provides a mock function with given fields: id
+func (_m *BlockchainBackendMock[Hash, N, H]) ExpectBlockHashFromID(id N) (Hash, error) {
+	ret := _m.Called(id)
+
+	var r0 Hash
+	var r1 error
+	if rf, ok := ret.Get(0).(func(N) (Hash, error)); ok {
+		return rf(id)
+	}
+	if rf, ok := ret.Get(0).(func(N) Hash); ok {
+		r0 = rf(id)
+	} else {
+		r0 = ret.Get(0).(Hash)
+	}
+
+	if rf, ok := ret.Get(1).(func(N) error); ok {
+		r1 = rf(id)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// BlockchainBackendMock_ExpectBlockHashFromID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ExpectBlockHashFromID'
+type BlockchainBackendMock_ExpectBlockHashFromID_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	*mock.Call
+}
+
+// ExpectBlockHashFromID is a helper method to define mock.On call
+//   - id N
+func (_e *BlockchainBackendMock_Expecter[Hash, N, H]) ExpectBlockHashFromID(id interface{}) *BlockchainBackendMock_ExpectBlockHashFromID_Call[Hash, N, H] {
+	return &BlockchainBackendMock_ExpectBlockHashFromID_Call[Hash, N, H]{Call: _e.mock.On("ExpectBlockHashFromID", id)}
+}
+
+func (_c *BlockchainBackendMock_ExpectBlockHashFromID_Call[Hash, N, H]) Run(run func(id N)) *BlockchainBackendMock_ExpectBlockHashFromID_Call[Hash, N, H] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].(N))
+	})
+	return _c
+}
+
+func (_c *BlockchainBackendMock_ExpectBlockHashFromID_Call[Hash, N, H]) Return(_a0 Hash, _a1 error) *BlockchainBackendMock_ExpectBlockHashFromID_Call[Hash, N, H] {
+	_c.Call.Return(_a0, _a1)
+	return _c
+}
+
+func (_c *BlockchainBackendMock_ExpectBlockHashFromID_Call[Hash, N, H]) RunAndReturn(run func(N) (Hash, error)) *BlockchainBackendMock_ExpectBlockHashFromID_Call[Hash, N, H] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// ExpectHeader provides a mock function with given fields: hash
+func (_m *BlockchainBackendMock[Hash, N, H]) ExpectHeader(hash Hash) (H, error) {
+	ret := _m.Called(hash)
+
+	var r0 H
+	var r1 error
+	if rf, ok := ret.Get(0).(func(Hash) (H, error)); ok {
+		return rf(hash)
+	}
+	if rf, ok := ret.Get(0).(func(Hash) H); ok {
+		r0 = rf(hash)
+	} else {
+		r0 = ret.Get(0).(H)
+	}
+
+	if rf, ok := ret.Get(1).(func(Hash) error); ok {
+		r1 = rf(hash)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// BlockchainBackendMock_ExpectHeader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ExpectHeader'
+type BlockchainBackendMock_ExpectHeader_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	*mock.Call
+}
+
+// ExpectHeader is a helper method to define mock.On call
+//   - hash Hash
+func (_e *BlockchainBackendMock_Expecter[Hash, N, H]) ExpectHeader(hash interface{}) *BlockchainBackendMock_ExpectHeader_Call[Hash, N, H] {
+	return &BlockchainBackendMock_ExpectHeader_Call[Hash, N, H]{Call: _e.mock.On("ExpectHeader", hash)}
+}
+
+func (_c *BlockchainBackendMock_ExpectHeader_Call[Hash, N, H]) Run(run func(hash Hash)) *BlockchainBackendMock_ExpectHeader_Call[Hash, N, H] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].(Hash))
+	})
+	return _c
+}
+
+func (_c *BlockchainBackendMock_ExpectHeader_Call[Hash, N, H]) Return(_a0 H, _a1 error) *BlockchainBackendMock_ExpectHeader_Call[Hash, N, H] {
+	_c.Call.Return(_a0, _a1)
+	return _c
+}
+
+func (_c *BlockchainBackendMock_ExpectHeader_Call[Hash, N, H]) RunAndReturn(run func(Hash) (H, error)) *BlockchainBackendMock_ExpectHeader_Call[Hash, N, H] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// Header provides a mock function with given fields: hash
+func (_m *BlockchainBackendMock[Hash, N, H]) Header(hash Hash) (*H, error) {
+	ret := _m.Called(hash)
+
+	var r0 *H
+	var r1 error
+	if rf, ok := ret.Get(0).(func(Hash) (*H, error)); ok {
+		return rf(hash)
+	}
+	if rf, ok := ret.Get(0).(func(Hash) *H); ok {
+		r0 = rf(hash)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*H)
+		}
+	}
+
+	if rf, ok := ret.Get(1).(func(Hash) error); ok {
+		r1 = rf(hash)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// BlockchainBackendMock_Header_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Header'
+type BlockchainBackendMock_Header_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	*mock.Call
+}
+
+// Header is a helper method to define mock.On call
+//   - hash Hash
+func (_e *BlockchainBackendMock_Expecter[Hash, N, H]) Header(hash interface{}) *BlockchainBackendMock_Header_Call[Hash, N, H] {
+	return &BlockchainBackendMock_Header_Call[Hash, N, H]{Call: _e.mock.On("Header", hash)}
+}
+
+func (_c *BlockchainBackendMock_Header_Call[Hash, N, H]) Run(run func(hash Hash)) *BlockchainBackendMock_Header_Call[Hash, N, H] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].(Hash))
+	})
+	return _c
+}
+
+func (_c *BlockchainBackendMock_Header_Call[Hash, N, H]) Return(_a0 *H, _a1 error) *BlockchainBackendMock_Header_Call[Hash, N, H] {
+	_c.Call.Return(_a0, _a1)
+	return _c
+}
+
+func (_c *BlockchainBackendMock_Header_Call[Hash, N, H]) RunAndReturn(run func(Hash) (*H, error)) *BlockchainBackendMock_Header_Call[Hash, N, H] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// Info provides a mock function with given fields:
+func (_m *BlockchainBackendMock[Hash, N, H]) Info() Info[N] {
+	ret := _m.Called()
+
+	var r0 Info[N]
+	if rf, ok := ret.Get(0).(func() Info[N]); ok {
+		r0 = rf()
+	} else {
+		r0 = ret.Get(0).(Info[N])
+	}
+
+	return r0
+}
+
+// BlockchainBackendMock_Info_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Info'
+type BlockchainBackendMock_Info_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	*mock.Call
+}
+
+// Info is a helper method to define mock.On call
+func (_e *BlockchainBackendMock_Expecter[Hash, N, H]) Info() *BlockchainBackendMock_Info_Call[Hash, N, H] {
+	return &BlockchainBackendMock_Info_Call[Hash, N, H]{Call: _e.mock.On("Info")}
+}
+
+func (_c *BlockchainBackendMock_Info_Call[Hash, N, H]) Run(run func()) *BlockchainBackendMock_Info_Call[Hash, N, H] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run()
+	})
+	return _c
+}
+
+func (_c *BlockchainBackendMock_Info_Call[Hash, N, H]) Return(_a0 Info[N]) *BlockchainBackendMock_Info_Call[Hash, N, H] {
+	_c.Call.Return(_a0)
+	return _c
+}
+
+func (_c *BlockchainBackendMock_Info_Call[Hash, N, H]) RunAndReturn(run func() Info[N]) *BlockchainBackendMock_Info_Call[Hash, N, H] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// Justifications provides a mock function with given fields: hash
+func (_m *BlockchainBackendMock[Hash, N, H]) Justifications(hash Hash) (*Justifications, error) {
+	ret := _m.Called(hash)
+
+	var r0 *Justifications
+	var r1 error
+	if rf, ok := ret.Get(0).(func(Hash) (*Justifications, error)); ok {
+		return rf(hash)
+	}
+	if rf, ok := ret.Get(0).(func(Hash) *Justifications); ok {
+		r0 = rf(hash)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*Justifications)
+		}
+	}
+
+	if rf, ok := ret.Get(1).(func(Hash) error); ok {
+		r1 = rf(hash)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// BlockchainBackendMock_Justifications_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Justifications'
+type BlockchainBackendMock_Justifications_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	*mock.Call
+}
+
+// Justifications is a helper method to define mock.On call
+//   - hash Hash
+func (_e *BlockchainBackendMock_Expecter[Hash, N, H]) Justifications(hash interface{}) *BlockchainBackendMock_Justifications_Call[Hash, N, H] {
+	return &BlockchainBackendMock_Justifications_Call[Hash, N, H]{Call: _e.mock.On("Justifications", hash)}
+}
+
+func (_c *BlockchainBackendMock_Justifications_Call[Hash, N, H]) Run(run func(hash Hash)) *BlockchainBackendMock_Justifications_Call[Hash, N, H] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].(Hash))
+	})
+	return _c
+}
+
+func (_c *BlockchainBackendMock_Justifications_Call[Hash, N, H]) Return(_a0 *Justifications, _a1 error) *BlockchainBackendMock_Justifications_Call[Hash, N, H] {
+	_c.Call.Return(_a0, _a1)
+	return _c
+}
+
+func (_c *BlockchainBackendMock_Justifications_Call[Hash, N, H]) RunAndReturn(run func(Hash) (*Justifications, error)) *BlockchainBackendMock_Justifications_Call[Hash, N, H] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// NewBlockchainBackendMock creates a new instance of BlockchainBackendMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+// The first argument is typically a *testing.T value.
+func NewBlockchainBackendMock[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]](t interface {
+	mock.TestingT
+	Cleanup(func())
+}) *BlockchainBackendMock[Hash, N, H] {
+	mock := &BlockchainBackendMock[Hash, N, H]{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
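A companion sketch for BlockchainBackendMock showing the RunAndReturn variant, where the stubbed result is computed from the requested hash; it assumes the testHeader helper remains available to the package tests.

```go
func exampleBlockchainBackendMock(t *testing.T) {
	blockchain := NewBlockchainBackendMock[string, uint, Header[string, uint]](t)

	// Derive the returned header from the requested hash rather than a fixed value.
	blockchain.EXPECT().Header("a").RunAndReturn(
		func(hash string) (*Header[string, uint], error) {
			header := Header[string, uint](testHeader[string, uint]{
				HashField:   hash,
				NumberField: 1,
			})
			return &header, nil
		})

	header, err := blockchain.Header("a")
	require.NoError(t, err)
	require.Equal(t, uint(1), (*header).Number())
}
```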
diff --git a/client/consensus/grandpa/mocks_generate_test.go b/client/consensus/grandpa/mocks_generate_test.go
new file mode 100644
index 0000000000..9566d88971
--- /dev/null
+++ b/client/consensus/grandpa/mocks_generate_test.go
@@ -0,0 +1,8 @@
+// Copyright 2023 ChainSafe Systems (ON)
+// SPDX-License-Identifier: LGPL-3.0-only
+
+package grandpa
+
+//go:generate mockery --name=Backend --filename=mocks_backend_test.go --output=./ --structname=BackendMock --inpackage --with-expecter=true
+//go:generate mockery --name=BlockchainBackend --filename=mocks_blockchainBackend_test.go --output=./ --structname=BlockchainBackendMock --inpackage --with-expecter=true
+//go:generate mockery --name=HeaderBackend --filename=mocks_headerBackend_test.go --output=./ --structname=HeaderBackendMock --inpackage --with-expecter=true
diff --git a/client/consensus/grandpa/mocks_headerBackend_test.go b/client/consensus/grandpa/mocks_headerBackend_test.go
new file mode 100644
index 0000000000..1f58c8f41f
--- /dev/null
+++ b/client/consensus/grandpa/mocks_headerBackend_test.go
@@ -0,0 +1,234 @@
+// Code generated by mockery v2.36.1. DO NOT EDIT.
+
+package grandpa
+
+import (
+	mock "github.com/stretchr/testify/mock"
+	constraints "golang.org/x/exp/constraints"
+)
+
+// HeaderBackendMock is an autogenerated mock type for the HeaderBackend type
+type HeaderBackendMock[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	mock.Mock
+}
+
+type HeaderBackendMock_Expecter[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	mock *mock.Mock
+}
+
+func (_m *HeaderBackendMock[Hash, N, H]) EXPECT() *HeaderBackendMock_Expecter[Hash, N, H] {
+	return &HeaderBackendMock_Expecter[Hash, N, H]{mock: &_m.Mock}
+}
+
+// ExpectBlockHashFromID provides a mock function with given fields: id
+func (_m *HeaderBackendMock[Hash, N, H]) ExpectBlockHashFromID(id N) (Hash, error) {
+	ret := _m.Called(id)
+
+	var r0 Hash
+	var r1 error
+	if rf, ok := ret.Get(0).(func(N) (Hash, error)); ok {
+		return rf(id)
+	}
+	if rf, ok := ret.Get(0).(func(N) Hash); ok {
+		r0 = rf(id)
+	} else {
+		r0 = ret.Get(0).(Hash)
+	}
+
+	if rf, ok := ret.Get(1).(func(N) error); ok {
+		r1 = rf(id)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// HeaderBackendMock_ExpectBlockHashFromID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ExpectBlockHashFromID'
+type HeaderBackendMock_ExpectBlockHashFromID_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	*mock.Call
+}
+
+// ExpectBlockHashFromID is a helper method to define mock.On call
+//   - id N
+func (_e *HeaderBackendMock_Expecter[Hash, N, H]) ExpectBlockHashFromID(id interface{}) *HeaderBackendMock_ExpectBlockHashFromID_Call[Hash, N, H] {
+	return &HeaderBackendMock_ExpectBlockHashFromID_Call[Hash, N, H]{Call: _e.mock.On("ExpectBlockHashFromID", id)}
+}
+
+func (_c *HeaderBackendMock_ExpectBlockHashFromID_Call[Hash, N, H]) Run(run func(id N)) *HeaderBackendMock_ExpectBlockHashFromID_Call[Hash, N, H] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].(N))
+	})
+	return _c
+}
+
+func (_c *HeaderBackendMock_ExpectBlockHashFromID_Call[Hash, N, H]) Return(_a0 Hash, _a1 error) *HeaderBackendMock_ExpectBlockHashFromID_Call[Hash, N, H] {
+	_c.Call.Return(_a0, _a1)
+	return _c
+}
+
+func (_c *HeaderBackendMock_ExpectBlockHashFromID_Call[Hash, N, H]) RunAndReturn(run func(N) (Hash, error)) *HeaderBackendMock_ExpectBlockHashFromID_Call[Hash, N, H] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// ExpectHeader provides a mock function with given fields: hash
+func (_m *HeaderBackendMock[Hash, N, H]) ExpectHeader(hash Hash) (H, error) {
+	ret := _m.Called(hash)
+
+	var r0 H
+	var r1 error
+	if rf, ok := ret.Get(0).(func(Hash) (H, error)); ok {
+		return rf(hash)
+	}
+	if rf, ok := ret.Get(0).(func(Hash) H); ok {
+		r0 = rf(hash)
+	} else {
+		r0 = ret.Get(0).(H)
+	}
+
+	if rf, ok := ret.Get(1).(func(Hash) error); ok {
+		r1 = rf(hash)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// HeaderBackendMock_ExpectHeader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ExpectHeader'
+type HeaderBackendMock_ExpectHeader_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	*mock.Call
+}
+
+// ExpectHeader is a helper method to define mock.On call
+//   - hash Hash
+func (_e *HeaderBackendMock_Expecter[Hash, N, H]) ExpectHeader(hash interface{}) *HeaderBackendMock_ExpectHeader_Call[Hash, N, H] {
+	return &HeaderBackendMock_ExpectHeader_Call[Hash, N, H]{Call: _e.mock.On("ExpectHeader", hash)}
+}
+
+func (_c *HeaderBackendMock_ExpectHeader_Call[Hash, N, H]) Run(run func(hash Hash)) *HeaderBackendMock_ExpectHeader_Call[Hash, N, H] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].(Hash))
+	})
+	return _c
+}
+
+func (_c *HeaderBackendMock_ExpectHeader_Call[Hash, N, H]) Return(_a0 H, _a1 error) *HeaderBackendMock_ExpectHeader_Call[Hash, N, H] {
+	_c.Call.Return(_a0, _a1)
+	return _c
+}
+
+func (_c *HeaderBackendMock_ExpectHeader_Call[Hash, N, H]) RunAndReturn(run func(Hash) (H, error)) *HeaderBackendMock_ExpectHeader_Call[Hash, N, H] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// Header provides a mock function with given fields: hash
+func (_m *HeaderBackendMock[Hash, N, H]) Header(hash Hash) (*H, error) {
+	ret := _m.Called(hash)
+
+	var r0 *H
+	var r1 error
+	if rf, ok := ret.Get(0).(func(Hash) (*H, error)); ok {
+		return rf(hash)
+	}
+	if rf, ok := ret.Get(0).(func(Hash) *H); ok {
+		r0 = rf(hash)
+	} else {
+		if ret.Get(0) != nil {
+			r0 = ret.Get(0).(*H)
+		}
+	}
+
+	if rf, ok := ret.Get(1).(func(Hash) error); ok {
+		r1 = rf(hash)
+	} else {
+		r1 = ret.Error(1)
+	}
+
+	return r0, r1
+}
+
+// HeaderBackendMock_Header_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Header'
+type HeaderBackendMock_Header_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	*mock.Call
+}
+
+// Header is a helper method to define mock.On call
+//   - hash Hash
+func (_e *HeaderBackendMock_Expecter[Hash, N, H]) Header(hash interface{}) *HeaderBackendMock_Header_Call[Hash, N, H] {
+	return &HeaderBackendMock_Header_Call[Hash, N, H]{Call: _e.mock.On("Header", hash)}
+}
+
+func (_c *HeaderBackendMock_Header_Call[Hash, N, H]) Run(run func(hash Hash)) *HeaderBackendMock_Header_Call[Hash, N, H] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run(args[0].(Hash))
+	})
+	return _c
+}
+
+func (_c *HeaderBackendMock_Header_Call[Hash, N, H]) Return(_a0 *H, _a1 error) *HeaderBackendMock_Header_Call[Hash, N, H] {
+	_c.Call.Return(_a0, _a1)
+	return _c
+}
+
+func (_c *HeaderBackendMock_Header_Call[Hash, N, H]) RunAndReturn(run func(Hash) (*H, error)) *HeaderBackendMock_Header_Call[Hash, N, H] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// Info provides a mock function with given fields:
+func (_m *HeaderBackendMock[Hash, N, H]) Info() Info[N] {
+	ret := _m.Called()
+
+	var r0 Info[N]
+	if rf, ok := ret.Get(0).(func() Info[N]); ok {
+		r0 = rf()
+	} else {
+		r0 = ret.Get(0).(Info[N])
+	}
+
+	return r0
+}
+
+// HeaderBackendMock_Info_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Info'
+type HeaderBackendMock_Info_Call[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]] struct {
+	*mock.Call
+}
+
+// Info is a helper method to define mock.On call
+func (_e *HeaderBackendMock_Expecter[Hash, N, H]) Info() *HeaderBackendMock_Info_Call[Hash, N, H] {
+	return &HeaderBackendMock_Info_Call[Hash, N, H]{Call: _e.mock.On("Info")}
+}
+
+func (_c *HeaderBackendMock_Info_Call[Hash, N, H]) Run(run func()) *HeaderBackendMock_Info_Call[Hash, N, H] {
+	_c.Call.Run(func(args mock.Arguments) {
+		run()
+	})
+	return _c
+}
+
+func (_c *HeaderBackendMock_Info_Call[Hash, N, H]) Return(_a0 Info[N]) *HeaderBackendMock_Info_Call[Hash, N, H] {
+	_c.Call.Return(_a0)
+	return _c
+}
+
+func (_c *HeaderBackendMock_Info_Call[Hash, N, H]) RunAndReturn(run func() Info[N]) *HeaderBackendMock_Info_Call[Hash, N, H] {
+	_c.Call.Return(run)
+	return _c
+}
+
+// NewHeaderBackendMock creates a new instance of HeaderBackendMock. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
+// The first argument is typically a *testing.T value.
+func NewHeaderBackendMock[Hash constraints.Ordered, N constraints.Unsigned, H Header[Hash, N]](t interface {
+	mock.TestingT
+	Cleanup(func())
+}) *HeaderBackendMock[Hash, N, H] {
+	mock := &HeaderBackendMock[Hash, N, H]{}
+	mock.Mock.Test(t)
+
+	t.Cleanup(func() { mock.AssertExpectations(t) })
+
+	return mock
+}
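Finally, a sketch of HeaderBackendMock in the same style, under the same assumptions as the previous two examples, with Info[N] assumed to be a plain struct whose zero value is acceptable for the test.

```go
func exampleHeaderBackendMock(t *testing.T) {
	headerBackend := NewHeaderBackendMock[string, uint, Header[string, uint]](t)

	headerBackend.EXPECT().Info().Return(Info[uint]{})
	headerBackend.EXPECT().ExpectBlockHashFromID(uint(1)).Return("a", nil)

	// The mock can now be passed wherever a HeaderBackend is expected.
	_ = headerBackend.Info()
	hash, err := headerBackend.ExpectBlockHashFromID(uint(1))
	require.NoError(t, err)
	require.Equal(t, "a", hash)
}
```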