Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/ci_release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,10 @@ on:
- minor
- major

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
lint:
uses: ./.github/workflows/lint.yml
Expand Down
8 changes: 6 additions & 2 deletions .github/workflows/integration_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@ on:
branches: ["main"]
workflow_dispatch:

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
liveness:
name: Test with Rollkit Chain
Expand Down Expand Up @@ -345,7 +349,7 @@ jobs:
ROLLKIT_ADDR=$(./gmd/go/bin/gmd keys show carol -a --home ./gmd/.gm)
CELESTIA_ADDR=$(celestia-appd keys show validator -a --keyring-backend test)
./gmd/go/bin/gmd tx ibc-transfer transfer transfer channel-0 $CELESTIA_ADDR 100stake --from carol -y --home ./gmd/.gm
sleep 20
sleep 15
BALANCE=$(celestia-appd query bank balances $CELESTIA_ADDR --output json --node http://localhost:26654 | jq '.balances')
echo "Celestia balance after IBC transfer: $BALANCE"
# TODO: check that the balance is correct
Expand All @@ -356,7 +360,7 @@ jobs:
ROLLKIT_ADDR=$(./gmd/go/bin/gmd keys show carol -a --home ./gmd/.gm)
CELESTIA_ADDR=$(celestia-appd keys show validator -a --keyring-backend test)
celestia-appd tx ibc-transfer transfer transfer channel-0 $ROLLKIT_ADDR 100utia --from validator --node http://localhost:26654 --fees 400utia --keyring-backend test -y
sleep 20
sleep 15
BALANCE=$(./gmd/go/bin/gmd query bank balances $ROLLKIT_ADDR --output json --home ./gmd/.gm | jq '.balances')
echo "Gm balance after IBC transfer: $BALANCE"
# fail if no denom starts with ibc/
Expand Down
196 changes: 90 additions & 106 deletions pkg/adapter/adapter.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import (

"cosmossdk.io/log"
abci "github.com/cometbft/cometbft/abci/types"
"github.com/cometbft/cometbft/config"
cmtcfg "github.com/cometbft/cometbft/config"
Comment thread
julienrbrt marked this conversation as resolved.
"github.com/cometbft/cometbft/libs/bytes"
"github.com/cometbft/cometbft/mempool"
corep2p "github.com/cometbft/cometbft/p2p"
Expand Down Expand Up @@ -44,7 +44,7 @@ type P2PClientInfo interface {
}

// LoadGenesisDoc returns the genesis document from the provided config file.
func LoadGenesisDoc(cfg *config.Config) (*cmttypes.GenesisDoc, error) {
func LoadGenesisDoc(cfg *cmtcfg.Config) (*cmttypes.GenesisDoc, error) {
genesisFile := cfg.GenesisFile()
doc, err := cmttypes.GenesisDocFromFile(genesisFile)
if err != nil {
Expand Down Expand Up @@ -80,7 +80,7 @@ func NewABCIExecutor(
p2pClient *rollkitp2p.Client,
p2pMetrics *rollkitp2p.Metrics,
logger log.Logger,
cfg *config.Config,
cfg *cmtcfg.Config,
appGenesis *genutiltypes.AppGenesis,
metrics *Metrics,
) *Adapter {
Expand All @@ -93,8 +93,8 @@ func NewABCIExecutor(
Prefix: ds.NewKey(rollnode.RollkitPrefix),
})
rollkitStore := rstore.New(rollkitPrefixStore)
// Create a new Store with ABCI prefix
abciStore := NewStore(store)

abciStore := NewExecABCIStore(store)

a := &Adapter{
App: app,
Expand Down Expand Up @@ -302,40 +302,9 @@ func (a *Adapter) ExecuteTxs(
return nil, 0, fmt.Errorf("rollkit header not found in context")
}

var proposedLastCommit abci.CommitInfo
var lastCommit *cmttypes.Commit

if blockHeight > 1 {
header, data, err := a.RollkitStore.GetBlockData(ctx, blockHeight-1)
if err != nil {
return nil, 0, fmt.Errorf("failed to get previous block data: %w", err)
}

commitForPrevBlock := &cmttypes.Commit{
Height: int64(header.Height()),
Round: 0,
BlockID: cmttypes.BlockID{Hash: bytes.HexBytes(header.Hash()), PartSetHeader: cmttypes.PartSetHeader{Total: 1, Hash: bytes.HexBytes(data.Hash())}},
Signatures: []cmttypes.CommitSig{
{
BlockIDFlag: cmttypes.BlockIDFlagCommit,
ValidatorAddress: cmttypes.Address(header.ProposerAddress),
Timestamp: header.Time(),
Signature: header.Signature,
},
},
}

lastCommit = commitForPrevBlock
proposedLastCommit = cometCommitToABCICommitInfo(commitForPrevBlock)
} else {
// For the first block, ProposedLastCommit is empty
proposedLastCommit = abci.CommitInfo{Round: 0, Votes: []abci.VoteInfo{}}
lastCommit = &cmttypes.Commit{
Height: int64(blockHeight),
Round: 0,
BlockID: cmttypes.BlockID{},
Signatures: []cmttypes.CommitSig{},
}
lastCommit, err := a.getLastCommit(ctx, blockHeight)
if err != nil {
return nil, 0, fmt.Errorf("failed to get last commit: %w", err)
}

emptyBlock, err := cometcompat.ToABCIBlock(header, &types.Data{}, lastCommit)
Expand All @@ -348,7 +317,7 @@ func (a *Adapter) ExecuteTxs(
Height: int64(blockHeight),
Time: timestamp,
Txs: txs,
ProposedLastCommit: proposedLastCommit,
ProposedLastCommit: cometCommitToABCICommitInfo(lastCommit),
Misbehavior: []abci.Misbehavior{},
ProposerAddress: s.Validators.Proposer.Address,
NextValidatorsHash: s.NextValidators.Hash(),
Expand Down Expand Up @@ -464,78 +433,65 @@ func (a *Adapter) ExecuteTxs(
cmtTxs[i] = txs[i]
}

commit := &cmttypes.Commit{
Height: int64(blockHeight),
Round: 0,
Signatures: []cmttypes.CommitSig{
// if blockheight is 0, we create a signed last commit.
if blockHeight == 0 {
lastCommit.Signatures = []cmttypes.CommitSig{
{
BlockIDFlag: cmttypes.BlockIDFlagCommit,
ValidatorAddress: s.Validators.Proposer.Address,
Timestamp: time.Now().UTC(),
Signature: []byte{},
},
},
}
}

if blockHeight > 1 {
header, data, err := a.RollkitStore.GetBlockData(ctx, blockHeight-1)
if err != nil {
return nil, 0, fmt.Errorf("failed to get previous block data: %w", err)
}
block := s.MakeBlock(int64(blockHeight), cmtTxs, lastCommit, nil, s.Validators.Proposer.Address)

commit = &cmttypes.Commit{
Height: int64(header.Height()),
Round: 0,
BlockID: cmttypes.BlockID{Hash: bytes.HexBytes(header.Hash()), PartSetHeader: cmttypes.PartSetHeader{Total: 1, Hash: bytes.HexBytes(data.Hash())}},
Signatures: []cmttypes.CommitSig{
{
BlockIDFlag: cmttypes.BlockIDFlagCommit,
ValidatorAddress: cmttypes.Address(header.ProposerAddress),
Timestamp: header.Time(),
Signature: header.Signature,
},
},
}
currentBlockID := cmttypes.BlockID{
Hash: block.Hash(),
PartSetHeader: cmttypes.PartSetHeader{Total: 1, Hash: block.DataHash},
}

block := s.MakeBlock(int64(blockHeight), cmtTxs, commit, nil, s.Validators.Proposer.Address)

currentBlockID := cmttypes.BlockID{Hash: block.Hash(), PartSetHeader: cmttypes.PartSetHeader{Total: 1, Hash: block.DataHash}}
if err := fireEvents(a.EventBus, block, currentBlockID, fbResp, validatorUpdates); err != nil {
a.Logger.Error("failed to fire events", "err", err)
}

fireEvents(a.Logger, a.EventBus, block, currentBlockID, fbResp, validatorUpdates)
// save the finalized block response
if err := a.Store.SaveBlockResponse(ctx, blockHeight, fbResp); err != nil {
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nice, supporting this in queries now.
I was wondering if we need a prune mechanism now? WDYT? If needed, this can be done in a new PR as well.

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

return nil, 0, fmt.Errorf("failed to save block response: %w", err)
}

a.Logger.Info("block executed successfully", "height", blockHeight, "appHash", fmt.Sprintf("%X", fbResp.AppHash))
return fbResp.AppHash, uint64(s.ConsensusParams.Block.MaxBytes), nil
}

func fireEvents(
logger log.Logger,
eventBus cmttypes.BlockEventPublisher,
block *cmttypes.Block,
blockID cmttypes.BlockID,
abciResponse *abci.ResponseFinalizeBlock,
validatorUpdates []*cmttypes.Validator,
) {
) error {
if err := eventBus.PublishEventNewBlock(cmttypes.EventDataNewBlock{
Block: block,
BlockID: blockID,
ResultFinalizeBlock: *abciResponse,
}); err != nil {
logger.Error("failed publishing new block", "err", err)
return fmt.Errorf("failed publishing new block: %w", err)
}

if err := eventBus.PublishEventNewBlockHeader(cmttypes.EventDataNewBlockHeader{
Header: block.Header,
}); err != nil {
logger.Error("failed publishing new block header", "err", err)
return fmt.Errorf("failed publishing new block header: %w", err)
}

if err := eventBus.PublishEventNewBlockEvents(cmttypes.EventDataNewBlockEvents{
Height: block.Height,
Events: abciResponse.Events,
NumTxs: int64(len(block.Txs)),
}); err != nil {
logger.Error("failed publishing new block events", "err", err)
return fmt.Errorf("failed publishing new block events: %w", err)
}

if len(block.Evidence.Evidence) != 0 {
Expand All @@ -544,7 +500,7 @@ func fireEvents(
Evidence: ev,
Height: block.Height,
}); err != nil {
logger.Error("failed publishing new evidence", "err", err)
return fmt.Errorf("failed publishing new evidence: %w", err)
}
}
}
Expand All @@ -556,15 +512,74 @@ func fireEvents(
Tx: tx,
Result: *(abciResponse.TxResults[i]),
}}); err != nil {
logger.Error("failed publishing event TX", "err", err)
return fmt.Errorf("failed publishing event TX: %w", err)
}
}

if len(validatorUpdates) > 0 {
if err := eventBus.PublishEventValidatorSetUpdates(
cmttypes.EventDataValidatorSetUpdates{ValidatorUpdates: validatorUpdates}); err != nil {
logger.Error("failed publishing event", "err", err)
return fmt.Errorf("failed publishing event: %w", err)
}
}

return nil
}

// getLastCommit builds the commit for the block preceding blockHeight.
// For heights 0 and 1 there is no previous block, so an empty commit
// (zero BlockID, no signatures) is returned instead.
func (a *Adapter) getLastCommit(ctx context.Context, blockHeight uint64) (*cmttypes.Commit, error) {
	// Heights <= 1 have no predecessor: hand back an empty commit.
	if blockHeight <= 1 {
		return &cmttypes.Commit{
			Height:     int64(blockHeight),
			Round:      0,
			BlockID:    cmttypes.BlockID{},
			Signatures: []cmttypes.CommitSig{},
		}, nil
	}

	prevHeader, prevData, err := a.RollkitStore.GetBlockData(ctx, blockHeight-1)
	if err != nil {
		return nil, fmt.Errorf("failed to get previous block data: %w", err)
	}

	// Identify the previous block by its header hash and a single-part
	// part-set built from the data hash.
	blockID := cmttypes.BlockID{
		Hash: bytes.HexBytes(prevHeader.Hash()),
		PartSetHeader: cmttypes.PartSetHeader{
			Total: 1,
			Hash:  bytes.HexBytes(prevData.Hash()),
		},
	}

	// Single commit signature derived from the previous block's proposer.
	sig := cmttypes.CommitSig{
		BlockIDFlag:      cmttypes.BlockIDFlagCommit,
		ValidatorAddress: cmttypes.Address(prevHeader.ProposerAddress),
		Timestamp:        prevHeader.Time(),
		Signature:        prevHeader.Signature,
	}

	return &cmttypes.Commit{
		Height:     int64(prevHeader.Height()),
		Round:      0,
		BlockID:    blockID,
		Signatures: []cmttypes.CommitSig{sig},
	}, nil
}

// cometCommitToABCICommitInfo converts a CometBFT commit into the ABCI
// CommitInfo structure used for ProposedLastCommit / DecidedLastCommit.
//
// Each commit signature becomes one vote. Voting power is not available
// at this layer, so every validator is reported with Power 0. A nil or
// signature-less commit yields a CommitInfo with an empty (non-nil)
// vote list.
func cometCommitToABCICommitInfo(commit *cmttypes.Commit) abci.CommitInfo {
	// Defensive nil guard (present in the previous revision of this
	// function): without it a nil commit panics on commit.Signatures.
	if commit == nil {
		return abci.CommitInfo{
			Round: 0,
			Votes: []abci.VoteInfo{},
		}
	}

	if len(commit.Signatures) == 0 {
		return abci.CommitInfo{
			Round: commit.Round,
			Votes: []abci.VoteInfo{},
		}
	}

	votes := make([]abci.VoteInfo, len(commit.Signatures))
	for i, sig := range commit.Signatures {
		votes[i] = abci.VoteInfo{
			Validator: abci.Validator{
				Address: sig.ValidatorAddress,
				Power:   0, // voting power is not tracked here
			},
			BlockIdFlag: cmtprototypes.BlockIDFlag(sig.BlockIDFlag),
		}
	}

	return abci.CommitInfo{
		Round: commit.Round,
		Votes: votes,
	}
}

Expand Down Expand Up @@ -618,34 +633,3 @@ func (a *Adapter) GetTxs(ctx context.Context) ([][]byte, error) {
// SetFinal marks blockHeight as final. This adapter implementation is a
// no-op and always returns nil.
// NOTE(review): presumably finality is handled elsewhere (e.g. by the DA
// layer) — confirm before relying on this hook.
func (a *Adapter) SetFinal(ctx context.Context, blockHeight uint64) error {
	return nil
}

// cometCommitToABCICommitInfo converts a CometBFT commit into the ABCI
// CommitInfo structure. A nil commit is treated as an empty one (Round 0,
// no votes), and a commit without signatures produces an empty — but
// non-nil — vote list. Voting power is not available at this layer, so
// every vote reports Power 0.
func cometCommitToABCICommitInfo(commit *cmttypes.Commit) abci.CommitInfo {
	// Defensive: a nil commit maps to an empty CommitInfo rather than
	// panicking on the field accesses below.
	if commit == nil {
		return abci.CommitInfo{
			Round: 0,
			Votes: []abci.VoteInfo{},
		}
	}

	// No signatures: preserve the round but return an empty vote list.
	if len(commit.Signatures) == 0 {
		return abci.CommitInfo{
			Round: commit.Round,
			Votes: []abci.VoteInfo{},
		}
	}

	// One vote per commit signature.
	votes := make([]abci.VoteInfo, len(commit.Signatures))
	for i, sig := range commit.Signatures {
		votes[i] = abci.VoteInfo{
			Validator: abci.Validator{
				Address: sig.ValidatorAddress,
				Power:   0, // voting power is not tracked here
			},
			BlockIdFlag: cmtprototypes.BlockIDFlag(sig.BlockIDFlag),
		}
	}
	return abci.CommitInfo{
		Round: commit.Round,
		Votes: votes,
	}
}
Loading
Loading