From 3330663697d8a7ba96b4dc85b3983a76fb10f2fc Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Wed, 25 Oct 2023 15:45:38 +0300 Subject: [PATCH 01/59] wip: add apply l1 block hashes txs logic --- common/types/block.go | 6 ++- common/types/chunk.go | 7 ++- common/types/message/message.go | 17 ++++--- contracts/src/libraries/codec/ChunkCodec.sol | 30 ++++++++---- .../logic/provertask/chunk_prover_task.go | 17 +++++-- coordinator/internal/orm/chunk.go | 16 +++++++ coordinator/internal/orm/l2_block.go | 46 ++++++++++--------- ...15_add_l1_block_hashes_fields_to_chunk.sql | 17 +++++++ ..._add_last_applied_l1_block_to_l2_block.sql | 14 ++++++ prover/core/prover.go | 17 +++++-- prover/prover.go | 7 ++- rollup/cmd/rollup_relayer/app/app.go | 2 +- .../internal/controller/relayer/l2_relayer.go | 11 +++-- .../controller/watcher/chunk_proposer.go | 41 ++++++++++++++--- rollup/internal/orm/chunk.go | 11 ++--- rollup/internal/orm/l2_block.go | 46 ++++++++++--------- 16 files changed, 214 insertions(+), 91 deletions(-) create mode 100644 database/migrate/migrations/00015_add_l1_block_hashes_fields_to_chunk.sql create mode 100644 database/migrate/migrations/00016_add_last_applied_l1_block_to_l2_block.sql diff --git a/common/types/block.go b/common/types/block.go index 64b8c70e8c..0fc2a8c50b 100644 --- a/common/types/block.go +++ b/common/types/block.go @@ -27,13 +27,14 @@ func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { return memoryCost } -// WrappedBlock contains the block's Header, Transactions and WithdrawTrieRoot hash. +// WrappedBlock contains the block's Header, Transactions, WithdrawTrieRoot hash and LastAppliedL1Block. type WrappedBlock struct { Header *types.Header `json:"header"` // Transactions is only used for recover types.Transactions, the from of types.TransactionData field is missing. Transactions []*types.TransactionData `json:"transactions"` WithdrawRoot common.Hash `json:"withdraw_trie_root,omitempty"` RowConsumption *types.RowConsumption `json:"row_consumption"` + LastAppliedL1Block uint64 `json:"latest_applied_l1_block"` txPayloadLengthCache map[string]uint64 } @@ -67,7 +68,7 @@ func (w *WrappedBlock) NumL2Transactions() uint64 { // Encode encodes the WrappedBlock into RollupV2 BlockContext Encoding. func (w *WrappedBlock) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) { - bytes := make([]byte, 60) + bytes := make([]byte, 68) if !w.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") @@ -92,6 +93,7 @@ func (w *WrappedBlock) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) binary.BigEndian.PutUint64(bytes[48:], w.Header.GasLimit) binary.BigEndian.PutUint16(bytes[56:], uint16(numTransactions)) binary.BigEndian.PutUint16(bytes[58:], uint16(numL1Messages)) + binary.BigEndian.PutUint64(bytes[60:], w.LastAppliedL1Block) return bytes, nil } diff --git a/common/types/chunk.go b/common/types/chunk.go index feba818def..72703f1319 100644 --- a/common/types/chunk.go +++ b/common/types/chunk.go @@ -14,7 +14,9 @@ import ( // Chunk contains blocks to be encoded type Chunk struct { - Blocks []*WrappedBlock `json:"blocks"` + Blocks []*WrappedBlock `json:"blocks"` + LastAppliedL1Block uint64 `json:"latest_applied_l1_block"` + L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` } // NumL1Messages returns the number of L1 messages in this chunk. @@ -77,6 +79,9 @@ func (c *Chunk) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) { chunkBytes = append(chunkBytes, l2TxDataBytes...) 
+ binary.BigEndian.AppendUint64(chunkBytes, c.LastAppliedL1Block) + chunkBytes = append(chunkBytes, c.L1BlockRangeHash.Bytes()...) + return chunkBytes, nil } diff --git a/common/types/message/message.go b/common/types/message/message.go index b687af559e..60fa01ca8b 100644 --- a/common/types/message/message.go +++ b/common/types/message/message.go @@ -220,7 +220,9 @@ type TaskMsg struct { // ChunkTaskDetail is a type containing ChunkTask detail. type ChunkTaskDetail struct { - BlockHashes []common.Hash `json:"block_hashes"` + BlockHashes []common.Hash `json:"block_hashes"` + PrevLastAppliedL1Block uint64 `json:"prev_last_applied_l1_block"` + L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` } // BatchTaskDetail is a type containing BatchTask detail. @@ -253,12 +255,13 @@ func (z *ProofDetail) Hash() ([]byte, error) { // ChunkInfo is for calculating pi_hash for chunk type ChunkInfo struct { - ChainID uint64 `json:"chain_id"` - PrevStateRoot common.Hash `json:"prev_state_root"` - PostStateRoot common.Hash `json:"post_state_root"` - WithdrawRoot common.Hash `json:"withdraw_root"` - DataHash common.Hash `json:"data_hash"` - IsPadding bool `json:"is_padding"` + ChainID uint64 `json:"chain_id"` + PrevStateRoot common.Hash `json:"prev_state_root"` + PostStateRoot common.Hash `json:"post_state_root"` + L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` + WithdrawRoot common.Hash `json:"withdraw_root"` + DataHash common.Hash `json:"data_hash"` + IsPadding bool `json:"is_padding"` } // ChunkProof includes the proof info that are required for chunk verification and rollup. diff --git a/contracts/src/libraries/codec/ChunkCodec.sol b/contracts/src/libraries/codec/ChunkCodec.sol index 0da4d95252..3dafc3f1c6 100644 --- a/contracts/src/libraries/codec/ChunkCodec.sol +++ b/contracts/src/libraries/codec/ChunkCodec.sol @@ -2,19 +2,21 @@ pragma solidity ^0.8.16; -/// @dev Below is the encoding for `Chunk`, total 60*n+1+m bytes. +/// @dev Below is the encoding for `Chunk`, total 68*n+9+m bytes. /// ```text -/// * Field Bytes Type Index Comments -/// * numBlocks 1 uint8 0 The number of blocks in this chunk -/// * block[0] 60 BlockContext 1 The first block in this chunk +/// * Field Bytes Type Index Comments +/// * numBlocks 1 uint8 0 The number of blocks in this chunk +/// * block[0] 68 BlockContext 1 The first block in this chunk /// * ...... -/// * block[i] 60 BlockContext 60*i+1 The (i+1)'th block in this chunk +/// * block[i] 68 BlockContext 68*i+1 The (i+1)'th block in this chunk /// * ...... -/// * block[n-1] 60 BlockContext 60*n-59 The last block in this chunk -/// * l2Transactions dynamic bytes 60*n+1 +/// * block[n-1] 68 BlockContext 68*n-67 The last block in this chunk +/// * l2Transactions dynamic bytes 68*n+1 +/// * lastAppliedL1Block 8 uint64 68*n+1+m The last applied L1 block number. +/// * l1BlockRangeHash 32 bytes 68*n+9+m The hash of the L1 block range. /// ``` /// -/// @dev Below is the encoding for `BlockContext`, total 60 bytes. +/// @dev Below is the encoding for `BlockContext`, total 68 bytes. /// ```text /// * Field Bytes Type Index Comments /// * blockNumber 8 uint64 0 The height of this block. @@ -23,9 +25,10 @@ pragma solidity ^0.8.16; /// * gasLimit 8 uint64 48 The gas limit of this block. /// * numTransactions 2 uint16 56 The number of transactions in this block, both L1 & L2 txs. /// * numL1Messages 2 uint16 58 The number of l1 messages in this block. +/// * lastAppliedL1Block 8 uint64 60 The last applied L1 block number. 
/// ``` library ChunkCodec { - uint256 internal constant BLOCK_CONTEXT_LENGTH = 60; + uint256 internal constant BLOCK_CONTEXT_LENGTH = 68; /// @notice Validate the length of chunk. /// @param chunkPtr The start memory offset of the chunk in memory. @@ -61,6 +64,15 @@ library ChunkCodec { } } + /// @notice Return the number of last applied L1 block. + /// @param blockPtr The start memory offset of the block context in memory. + /// @return _lastAppliedL1Block The number of last applied L1 block. + function lastAppliedL1Block(uint256 blockPtr) internal pure returns (uint256 _lastAppliedL1Block) { + assembly { + _lastAppliedL1Block := shr(240, mload(add(blockPtr, 60))) + } + } + /// @notice Copy the block context to another memory. /// @param chunkPtr The start memory offset of the chunk in memory. /// @param dstPtr The destination memory offset to store the block context. diff --git a/coordinator/internal/logic/provertask/chunk_prover_task.go b/coordinator/internal/logic/provertask/chunk_prover_task.go index 84b108e0ec..4343c1486f 100644 --- a/coordinator/internal/logic/provertask/chunk_prover_task.go +++ b/coordinator/internal/logic/provertask/chunk_prover_task.go @@ -130,7 +130,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato return nil, ErrCoordinatorInternalFailure } - taskMsg, err := cp.formatProverTask(ctx, &proverTask) + taskMsg, err := cp.formatProverTask(ctx, &proverTask, chunkTask) if err != nil { cp.recoverActiveAttempts(ctx, chunkTask) log.Error("format prover task failure", "hash", chunkTask.Hash, "err", err) @@ -142,7 +142,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato return taskMsg, nil } -func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask) (*coordinatorType.GetTaskSchema, error) { +func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, chunk *orm.Chunk) (*coordinatorType.GetTaskSchema, error) { // Get block hashes. 
wrappedBlocks, wrappedErr := cp.blockOrm.GetL2BlocksByChunkHash(ctx, task.TaskID) if wrappedErr != nil || len(wrappedBlocks) == 0 { @@ -154,10 +154,17 @@ func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.Prove blockHashes[i] = wrappedBlock.Header.Hash() } + parentChunk, err := cp.chunkOrm.GetChunkByHash(ctx, chunk.ParentChunkHash) + if err != nil { + return nil, fmt.Errorf("failed to fetch parent chunk blocks, chunk hash:%s err:%w", chunk.ParentChunkHash, err) + } + taskDetail := message.ChunkTaskDetail{ - BlockHashes: blockHashes, + BlockHashes: blockHashes, + PrevLastAppliedL1Block: parentChunk.LastAppliedL1Block, + L1BlockRangeHash: common.HexToHash(chunk.L1BlockRangeHash), } - blockHashesBytes, err := json.Marshal(taskDetail) + taskDataBytes, err := json.Marshal(taskDetail) if err != nil { return nil, fmt.Errorf("failed to marshal block hashes hash:%s, err:%w", task.TaskID, err) } @@ -166,7 +173,7 @@ func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.Prove UUID: task.UUID.String(), TaskID: task.TaskID, TaskType: int(message.ProofTypeChunk), - TaskData: string(blockHashesBytes), + TaskData: string(taskDataBytes), } return proverTaskSchema, nil diff --git a/coordinator/internal/orm/chunk.go b/coordinator/internal/orm/chunk.go index 965157a3be..88397ed103 100644 --- a/coordinator/internal/orm/chunk.go +++ b/coordinator/internal/orm/chunk.go @@ -33,6 +33,8 @@ type Chunk struct { StateRoot string `json:"state_root" gorm:"column:state_root"` ParentChunkStateRoot string `json:"parent_chunk_state_root" gorm:"column:parent_chunk_state_root"` WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block" gorm:"column:last_applied_l1_block"` + L1BlockRangeHash string `json:"l1_block_range_hash" gorm:"column:l1_block_range_hash"` // proof ProvingStatus int16 `json:"proving_status" gorm:"column:proving_status;default:1"` @@ -125,6 +127,18 @@ func (o *Chunk) GetChunksByBatchHash(ctx context.Context, batchHash string) ([]* return chunks, nil } +func (o *Chunk) GetChunkByHash(ctx context.Context, hash string) (*Chunk, error) { + db := o.db.WithContext(ctx) + db = db.Model(&Chunk{}) + db = db.Where("hash", hash) + + var chunk Chunk + if err := db.First(&chunk).Error; err != nil { + return nil, fmt.Errorf("Chunk.GetChunkByHash error: %w", err) + } + return &chunk, nil +} + // GetProofsByBatchHash retrieves the proofs associated with a specific batch hash. // It returns a slice of decoded proofs (message.ChunkProof) obtained from the database. // The returned proofs are sorted in ascending order by their associated chunk index. 
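For illustration only (not part of the patch): a minimal Go sketch of how the task detail introduced above is assembled. The parent chunk's LastAppliedL1Block becomes the prover's PrevLastAppliedL1Block, falling back to zero when the chunk has no parent, and the chunk's own L1BlockRangeHash is passed through unchanged. The helper name buildChunkTaskDetail is hypothetical.

    // Hypothetical helper, mirroring formatProverTask above; sketch only.
    func buildChunkTaskDetail(blockHashes []common.Hash, parentChunk, chunk *orm.Chunk) message.ChunkTaskDetail {
        var prevLastAppliedL1Block uint64
        if parentChunk != nil {
            // The prover covers the L1 block range (parent.LastAppliedL1Block, chunk.LastAppliedL1Block].
            prevLastAppliedL1Block = parentChunk.LastAppliedL1Block
        }
        return message.ChunkTaskDetail{
            BlockHashes:            blockHashes,
            PrevLastAppliedL1Block: prevLastAppliedL1Block,
            L1BlockRangeHash:       common.HexToHash(chunk.L1BlockRangeHash),
        }
    }
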
@@ -280,6 +294,8 @@ func (o *Chunk) InsertChunk(ctx context.Context, chunk *types.Chunk, dbTX ...*go StateRoot: chunk.Blocks[numBlocks-1].Header.Root.Hex(), ParentChunkStateRoot: parentChunkStateRoot, WithdrawRoot: chunk.Blocks[numBlocks-1].WithdrawRoot.Hex(), + LastAppliedL1Block: chunk.LastAppliedL1Block, + L1BlockRangeHash: chunk.L1BlockRangeHash.Hex(), ProvingStatus: int16(types.ProvingTaskUnassigned), TotalAttempts: 0, ActiveAttempts: 0, diff --git a/coordinator/internal/orm/l2_block.go b/coordinator/internal/orm/l2_block.go index 38728dea8f..26b3c4adb5 100644 --- a/coordinator/internal/orm/l2_block.go +++ b/coordinator/internal/orm/l2_block.go @@ -19,17 +19,18 @@ type L2Block struct { db *gorm.DB `gorm:"column:-"` // block - Number uint64 `json:"number" gorm:"number"` - Hash string `json:"hash" gorm:"hash"` - ParentHash string `json:"parent_hash" gorm:"parent_hash"` - Header string `json:"header" gorm:"header"` - Transactions string `json:"transactions" gorm:"transactions"` - WithdrawRoot string `json:"withdraw_root" gorm:"withdraw_root"` - StateRoot string `json:"state_root" gorm:"state_root"` - TxNum uint32 `json:"tx_num" gorm:"tx_num"` - GasUsed uint64 `json:"gas_used" gorm:"gas_used"` - BlockTimestamp uint64 `json:"block_timestamp" gorm:"block_timestamp"` - RowConsumption string `json:"row_consumption" gorm:"row_consumption"` + Number uint64 `json:"number" gorm:"number"` + Hash string `json:"hash" gorm:"hash"` + ParentHash string `json:"parent_hash" gorm:"parent_hash"` + Header string `json:"header" gorm:"header"` + Transactions string `json:"transactions" gorm:"transactions"` + WithdrawRoot string `json:"withdraw_root" gorm:"withdraw_root"` + StateRoot string `json:"state_root" gorm:"state_root"` + LastAppliedL1Block uint64 `json:"latest_applied_l1_block" gorm:"latest_applied_l1_block"` + TxNum uint32 `json:"tx_num" gorm:"tx_num"` + GasUsed uint64 `json:"gas_used" gorm:"gas_used"` + BlockTimestamp uint64 `json:"block_timestamp" gorm:"block_timestamp"` + RowConsumption string `json:"row_consumption" gorm:"row_consumption"` // chunk ChunkHash string `json:"chunk_hash" gorm:"chunk_hash;default:NULL"` @@ -112,17 +113,18 @@ func (o *L2Block) InsertL2Blocks(ctx context.Context, blocks []*types.WrappedBlo } l2Block := L2Block{ - Number: block.Header.Number.Uint64(), - Hash: block.Header.Hash().String(), - ParentHash: block.Header.ParentHash.String(), - Transactions: string(txs), - WithdrawRoot: block.WithdrawRoot.Hex(), - StateRoot: block.Header.Root.Hex(), - TxNum: uint32(len(block.Transactions)), - GasUsed: block.Header.GasUsed, - BlockTimestamp: block.Header.Time, - Header: string(header), - RowConsumption: string(rc), + Number: block.Header.Number.Uint64(), + Hash: block.Header.Hash().String(), + ParentHash: block.Header.ParentHash.String(), + Transactions: string(txs), + WithdrawRoot: block.WithdrawRoot.Hex(), + StateRoot: block.Header.Root.Hex(), + LastAppliedL1Block: block.LastAppliedL1Block, + TxNum: uint32(len(block.Transactions)), + GasUsed: block.Header.GasUsed, + BlockTimestamp: block.Header.Time, + Header: string(header), + RowConsumption: string(rc), } l2Blocks = append(l2Blocks, l2Block) } diff --git a/database/migrate/migrations/00015_add_l1_block_hashes_fields_to_chunk.sql b/database/migrate/migrations/00015_add_l1_block_hashes_fields_to_chunk.sql new file mode 100644 index 0000000000..0bbea4b890 --- /dev/null +++ b/database/migrate/migrations/00015_add_l1_block_hashes_fields_to_chunk.sql @@ -0,0 +1,17 @@ +-- +goose Up +-- +goose StatementBegin + +ALTER TABLE chunk + 
ADD COLUMN last_applied_l1_block BIGINT NOT NULL DEFAULT 0, + ADD COLUMN l1_block_range_hash VARCHAR DEFAULT NULL; + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin + +ALTER TABLE IF EXISTS chunk +DROP COLUMN last_applied_l1_block; +DROP COLUMN l1_block_range_hash; + +-- +goose StatementEnd diff --git a/database/migrate/migrations/00016_add_last_applied_l1_block_to_l2_block.sql b/database/migrate/migrations/00016_add_last_applied_l1_block_to_l2_block.sql new file mode 100644 index 0000000000..518df8e822 --- /dev/null +++ b/database/migrate/migrations/00016_add_last_applied_l1_block_to_l2_block.sql @@ -0,0 +1,14 @@ +-- +goose Up +-- +goose StatementBegin + +ALTER TABLE l2_block ADD COLUMN last_applied_l1_block BIGINT NOT NULL DEFAULT 0; + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin + +ALTER TABLE IF EXISTS l2_block +DROP COLUMN last_applied_l1_block; + +-- +goose StatementEnd diff --git a/prover/core/prover.go b/prover/core/prover.go index 6cd29bfeb3..f34e3dba75 100644 --- a/prover/core/prover.go +++ b/prover/core/prover.go @@ -17,6 +17,7 @@ import ( "path/filepath" "unsafe" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/log" @@ -105,7 +106,12 @@ func (p *ProverCore) ProveBatch(taskID string, chunkInfos []*message.ChunkInfo, } // ProveChunk call rust ffi to generate chunk proof. -func (p *ProverCore) ProveChunk(taskID string, traces []*types.BlockTrace) (*message.ChunkProof, error) { +func (p *ProverCore) ProveChunk( + taskID string, + traces []*types.BlockTrace, + prevLastAppliedL1Block uint64, + l1BlockRangeHash common.Hash, +) (*message.ChunkProof, error) { if p.cfg.ProofType != message.ProofTypeChunk { return nil, fmt.Errorf("prover is not a chunk-prover (type: %v), but is trying to prove a chunk", p.cfg.ProofType) } @@ -114,7 +120,7 @@ func (p *ProverCore) ProveChunk(taskID string, traces []*types.BlockTrace) (*mes if err != nil { return nil, err } - proofByt, err := p.proveChunk(tracesByt) + proofByt, err := p.proveChunk(tracesByt, prevLastAppliedL1Block, l1BlockRangeHash.Bytes()) if err != nil { return nil, err } @@ -205,12 +211,15 @@ func (p *ProverCore) proveBatch(chunkInfosByt []byte, chunkProofsByt []byte) ([] return result.Message, nil } -func (p *ProverCore) proveChunk(tracesByt []byte) ([]byte, error) { +func (p *ProverCore) proveChunk(tracesByt []byte, prevLastAppliedL1Block uint64, l1BlockRangeHash []byte) ([]byte, error) { tracesStr := C.CString(string(tracesByt)) defer C.free(unsafe.Pointer(tracesStr)) + l1BlockRangeHashStr := C.CString(string(l1BlockRangeHash)) + defer C.free(unsafe.Pointer(l1BlockRangeHashStr)) + log.Info("Start to create chunk proof ...") - cProof := C.gen_chunk_proof(tracesStr) + cProof := C.gen_chunk_proof(tracesStr, C.uint64_t(prevLastAppliedL1Block), l1BlockRangeHashStr) defer C.free_c_chars(cProof) log.Info("Finish creating chunk proof!") diff --git a/prover/prover.go b/prover/prover.go index 43d7d1d891..2e625417eb 100644 --- a/prover/prover.go +++ b/prover/prover.go @@ -288,7 +288,12 @@ func (r *Prover) proveChunk(task *store.ProvingTask) (*message.ChunkProof, error if err != nil { return nil, fmt.Errorf("get traces from eth node failed, block hashes: %v, err: %v", task.Task.ChunkTaskDetail.BlockHashes, err) } - return r.proverCore.ProveChunk(task.Task.ID, traces) + return r.proverCore.ProveChunk( + task.Task.ID, + traces, + task.Task.ChunkTaskDetail.PrevLastAppliedL1Block, + task.Task.ChunkTaskDetail.L1BlockRangeHash, + ) 
} func (r *Prover) proveBatch(task *store.ProvingTask) (*message.BatchProof, error) { diff --git a/rollup/cmd/rollup_relayer/app/app.go b/rollup/cmd/rollup_relayer/app/app.go index 63213fd297..d82a1fa5a4 100644 --- a/rollup/cmd/rollup_relayer/app/app.go +++ b/rollup/cmd/rollup_relayer/app/app.go @@ -80,7 +80,7 @@ func action(ctx *cli.Context) error { return err } - chunkProposer := watcher.NewChunkProposer(subCtx, cfg.L2Config.ChunkProposerConfig, db, registry) + chunkProposer := watcher.NewChunkProposer(subCtx, l2client, cfg.L2Config.ChunkProposerConfig, db, registry) if err != nil { log.Error("failed to create chunkProposer", "config file", cfgFile, "error", err) return err diff --git a/rollup/internal/controller/relayer/l2_relayer.go b/rollup/internal/controller/relayer/l2_relayer.go index ca10f6ba89..42c4058e71 100644 --- a/rollup/internal/controller/relayer/l2_relayer.go +++ b/rollup/internal/controller/relayer/l2_relayer.go @@ -165,16 +165,17 @@ func (r *Layer2Relayer) initializeGenesis() error { chunk := &types.Chunk{ Blocks: []*types.WrappedBlock{{ - Header: genesis, - Transactions: nil, - WithdrawRoot: common.Hash{}, - RowConsumption: &gethTypes.RowConsumption{}, + Header: genesis, + Transactions: nil, + WithdrawRoot: common.Hash{}, + RowConsumption: &gethTypes.RowConsumption{}, + LastAppliedL1Block: 0, }}, } err = r.db.Transaction(func(dbTX *gorm.DB) error { var dbChunk *orm.Chunk - dbChunk, err = r.chunkOrm.InsertChunk(r.ctx, chunk, dbTX) + dbChunk, err = r.chunkOrm.InsertChunk(r.ctx, nil, chunk, dbTX) if err != nil { return fmt.Errorf("failed to insert chunk: %v", err) } diff --git a/rollup/internal/controller/watcher/chunk_proposer.go b/rollup/internal/controller/watcher/chunk_proposer.go index 92d707c8bc..aed628f11e 100644 --- a/rollup/internal/controller/watcher/chunk_proposer.go +++ b/rollup/internal/controller/watcher/chunk_proposer.go @@ -4,11 +4,13 @@ import ( "context" "errors" "fmt" + "math/big" "time" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" gethTypes "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/ethclient" "github.com/scroll-tech/go-ethereum/log" "gorm.io/gorm" @@ -48,6 +50,8 @@ type ChunkProposer struct { ctx context.Context db *gorm.DB + *ethclient.Client + chunkOrm *orm.Chunk l2BlockOrm *orm.L2Block @@ -74,7 +78,7 @@ type ChunkProposer struct { } // NewChunkProposer creates a new ChunkProposer instance. -func NewChunkProposer(ctx context.Context, cfg *config.ChunkProposerConfig, db *gorm.DB, reg prometheus.Registerer) *ChunkProposer { +func NewChunkProposer(ctx context.Context, client *ethclient.Client, cfg *config.ChunkProposerConfig, db *gorm.DB, reg prometheus.Registerer) *ChunkProposer { log.Debug("new chunk proposer", "maxTxNumPerChunk", cfg.MaxTxNumPerChunk, "maxL1CommitGasPerChunk", cfg.MaxL1CommitGasPerChunk, @@ -85,6 +89,7 @@ func NewChunkProposer(ctx context.Context, cfg *config.ChunkProposerConfig, db * return &ChunkProposer{ ctx: ctx, + Client: client, db: db, chunkOrm: orm.NewChunk(db), l2BlockOrm: orm.NewL2Block(db), @@ -149,28 +154,34 @@ func NewChunkProposer(ctx context.Context, cfg *config.ChunkProposerConfig, db * // TryProposeChunk tries to propose a new chunk. 
func (p *ChunkProposer) TryProposeChunk() { + parentChunk, err := p.chunkOrm.GetLatestChunk(p.ctx) + if err != nil && !errors.Is(errors.Unwrap(err), gorm.ErrRecordNotFound) { + log.Error("failed to get latest chunk", "err", err) + return + } + p.chunkProposerCircleTotal.Inc() - proposedChunk, err := p.proposeChunk() + proposedChunk, err := p.proposeChunk(parentChunk) if err != nil { p.proposeChunkFailureTotal.Inc() log.Error("propose new chunk failed", "err", err) return } - if err := p.updateChunkInfoInDB(proposedChunk); err != nil { + if err := p.updateChunkInfoInDB(parentChunk, proposedChunk); err != nil { p.proposeChunkUpdateInfoFailureTotal.Inc() log.Error("update chunk info in orm failed", "err", err) } } -func (p *ChunkProposer) updateChunkInfoInDB(chunk *types.Chunk) error { +func (p *ChunkProposer) updateChunkInfoInDB(parentChunk *orm.Chunk, chunk *types.Chunk) error { if chunk == nil { return nil } p.proposeChunkUpdateInfoTotal.Inc() err := p.db.Transaction(func(dbTX *gorm.DB) error { - dbChunk, err := p.chunkOrm.InsertChunk(p.ctx, chunk, dbTX) + dbChunk, err := p.chunkOrm.InsertChunk(p.ctx, parentChunk, chunk, dbTX) if err != nil { log.Warn("ChunkProposer.InsertChunk failed", "chunk hash", chunk.Hash) return err @@ -184,7 +195,7 @@ func (p *ChunkProposer) updateChunkInfoInDB(chunk *types.Chunk) error { return err } -func (p *ChunkProposer) proposeChunk() (*types.Chunk, error) { +func (p *ChunkProposer) proposeChunk(parentChunk *orm.Chunk) (*types.Chunk, error) { unchunkedBlockHeight, err := p.chunkOrm.GetUnchunkedBlockHeight(p.ctx) if err != nil { return nil, err @@ -206,6 +217,24 @@ func (p *ChunkProposer) proposeChunk() (*types.Chunk, error) { var totalL1CommitCalldataSize uint64 var totalL1CommitGas uint64 crc := chunkRowConsumption{} + lastAppliedL1Block := blocks[len(blocks)-1].LastAppliedL1Block + var l1BlockRangeHashFrom uint64 + + if parentChunk != nil { + l1BlockRangeHashFrom = parentChunk.LastAppliedL1Block + if l1BlockRangeHashFrom != 0 { + l1BlockRangeHashFrom++ + } + } + + l1BlockRangeHash, err := p.Client.GetL1BlockRangeHash(p.ctx, big.NewInt(int64(l1BlockRangeHashFrom)), big.NewInt(int64(lastAppliedL1Block))) + if err != nil { + log.Error("failed to get block range hash", "err", err) + return nil, fmt.Errorf("chunk-proposer failed to get block range hash error: %w", err) + } + + chunk.LastAppliedL1Block = lastAppliedL1Block + chunk.L1BlockRangeHash = *l1BlockRangeHash for i, block := range blocks { // metric values diff --git a/rollup/internal/orm/chunk.go b/rollup/internal/orm/chunk.go index 49778fdf1c..185ef38e9a 100644 --- a/rollup/internal/orm/chunk.go +++ b/rollup/internal/orm/chunk.go @@ -30,6 +30,8 @@ type Chunk struct { StateRoot string `json:"state_root" gorm:"column:state_root"` ParentChunkStateRoot string `json:"parent_chunk_state_root" gorm:"column:parent_chunk_state_root"` WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"` + LastAppliedL1Block uint64 `json:"latest_applied_l1_block" gorm:"column:latest_applied_l1_block"` + L1BlockRangeHash string `json:"l1_block_range_hash" gorm:"column:l1_block_range_hash"` // proof ProvingStatus int16 `json:"proving_status" gorm:"column:proving_status;default:1"` @@ -135,7 +137,7 @@ func (o *Chunk) GetChunksGEIndex(ctx context.Context, index uint64, limit int) ( } // InsertChunk inserts a new chunk into the database. 
-func (o *Chunk) InsertChunk(ctx context.Context, chunk *types.Chunk, dbTX ...*gorm.DB) (*Chunk, error) { +func (o *Chunk) InsertChunk(ctx context.Context, parentChunk *Chunk, chunk *types.Chunk, dbTX ...*gorm.DB) (*Chunk, error) { if chunk == nil || len(chunk.Blocks) == 0 { return nil, errors.New("invalid args") } @@ -144,11 +146,6 @@ func (o *Chunk) InsertChunk(ctx context.Context, chunk *types.Chunk, dbTX ...*go var totalL1MessagePoppedBefore uint64 var parentChunkHash string var parentChunkStateRoot string - parentChunk, err := o.GetLatestChunk(ctx) - if err != nil && !errors.Is(errors.Unwrap(err), gorm.ErrRecordNotFound) { - log.Error("failed to get latest chunk", "err", err) - return nil, fmt.Errorf("Chunk.InsertChunk error: %w", err) - } // if parentChunk==nil then err==gorm.ErrRecordNotFound, which means there's // not chunk record in the db, we then use default empty values for the creating chunk; @@ -194,6 +191,8 @@ func (o *Chunk) InsertChunk(ctx context.Context, chunk *types.Chunk, dbTX ...*go StateRoot: chunk.Blocks[numBlocks-1].Header.Root.Hex(), ParentChunkStateRoot: parentChunkStateRoot, WithdrawRoot: chunk.Blocks[numBlocks-1].WithdrawRoot.Hex(), + LastAppliedL1Block: chunk.LastAppliedL1Block, + L1BlockRangeHash: chunk.L1BlockRangeHash.Hex(), ProvingStatus: int16(types.ProvingTaskUnassigned), } diff --git a/rollup/internal/orm/l2_block.go b/rollup/internal/orm/l2_block.go index 6f1e019e3a..ae34625455 100644 --- a/rollup/internal/orm/l2_block.go +++ b/rollup/internal/orm/l2_block.go @@ -19,17 +19,18 @@ type L2Block struct { db *gorm.DB `gorm:"column:-"` // block - Number uint64 `json:"number" gorm:"number"` - Hash string `json:"hash" gorm:"hash"` - ParentHash string `json:"parent_hash" gorm:"parent_hash"` - Header string `json:"header" gorm:"header"` - Transactions string `json:"transactions" gorm:"transactions"` - WithdrawRoot string `json:"withdraw_root" gorm:"withdraw_root"` - StateRoot string `json:"state_root" gorm:"state_root"` - TxNum uint32 `json:"tx_num" gorm:"tx_num"` - GasUsed uint64 `json:"gas_used" gorm:"gas_used"` - BlockTimestamp uint64 `json:"block_timestamp" gorm:"block_timestamp"` - RowConsumption string `json:"row_consumption" gorm:"row_consumption"` + Number uint64 `json:"number" gorm:"number"` + Hash string `json:"hash" gorm:"hash"` + ParentHash string `json:"parent_hash" gorm:"parent_hash"` + Header string `json:"header" gorm:"header"` + Transactions string `json:"transactions" gorm:"transactions"` + WithdrawRoot string `json:"withdraw_root" gorm:"withdraw_root"` + StateRoot string `json:"state_root" gorm:"state_root"` + TxNum uint32 `json:"tx_num" gorm:"tx_num"` + GasUsed uint64 `json:"gas_used" gorm:"gas_used"` + BlockTimestamp uint64 `json:"block_timestamp" gorm:"block_timestamp"` + RowConsumption string `json:"row_consumption" gorm:"row_consumption"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block" gorm:"last_applied_l1_block"` // chunk ChunkHash string `json:"chunk_hash" gorm:"chunk_hash;default:NULL"` @@ -207,17 +208,18 @@ func (o *L2Block) InsertL2Blocks(ctx context.Context, blocks []*types.WrappedBlo } l2Block := L2Block{ - Number: block.Header.Number.Uint64(), - Hash: block.Header.Hash().String(), - ParentHash: block.Header.ParentHash.String(), - Transactions: string(txs), - WithdrawRoot: block.WithdrawRoot.Hex(), - StateRoot: block.Header.Root.Hex(), - TxNum: uint32(len(block.Transactions)), - GasUsed: block.Header.GasUsed, - BlockTimestamp: block.Header.Time, - RowConsumption: string(rc), - Header: string(header), + Number: 
block.Header.Number.Uint64(), + Hash: block.Header.Hash().String(), + ParentHash: block.Header.ParentHash.String(), + Transactions: string(txs), + WithdrawRoot: block.WithdrawRoot.Hex(), + StateRoot: block.Header.Root.Hex(), + TxNum: uint32(len(block.Transactions)), + GasUsed: block.Header.GasUsed, + BlockTimestamp: block.Header.Time, + RowConsumption: string(rc), + Header: string(header), + LastAppliedL1Block: block.LastAppliedL1Block, } l2Blocks = append(l2Blocks, l2Block) } From 2a6c74fabf53983ebd9d0dce38906e61e39f7e42 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Wed, 25 Oct 2023 17:31:02 +0300 Subject: [PATCH 02/59] wip: add chunk trace and block context fixes --- common/types/block.go | 6 +++--- common/types/chunk.go | 6 +++--- common/types/message/message.go | 7 +++++++ coordinator/internal/orm/l2_block.go | 2 ++ prover/core/prover.go | 18 ++++++------------ prover/prover.go | 10 +++++++--- 6 files changed, 28 insertions(+), 21 deletions(-) diff --git a/common/types/block.go b/common/types/block.go index 0fc2a8c50b..b98cd9c0ec 100644 --- a/common/types/block.go +++ b/common/types/block.go @@ -110,7 +110,7 @@ func (w *WrappedBlock) EstimateL1CommitCalldataSize() uint64 { size += 4 // 4 bytes payload length size += w.getTxPayloadLength(txData) } - size += 60 // 60 bytes BlockContext + size += 68 // 68 bytes BlockContext return size } @@ -130,8 +130,8 @@ func (w *WrappedBlock) EstimateL1CommitGas() uint64 { total += GetKeccak256Gas(txPayloadLength) // l2 tx hash } - // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 + // 68 bytes BlockContext calldata + total += CalldataNonZeroByteGas * 68 // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue diff --git a/common/types/chunk.go b/common/types/chunk.go index 72703f1319..4614df565c 100644 --- a/common/types/chunk.go +++ b/common/types/chunk.go @@ -55,8 +55,8 @@ func (c *Chunk) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) { } totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) - if len(blockBytes) != 60 { - return nil, fmt.Errorf("block encoding is not 60 bytes long %x", len(blockBytes)) + if len(blockBytes) != 68 { + return nil, fmt.Errorf("block encoding is not 68 bytes long %x", len(blockBytes)) } chunkBytes = append(chunkBytes, blockBytes...) 
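For reference (not part of the patch): with this series a BlockContext is 68 bytes, the previous 60-byte layout plus a trailing 8-byte lastAppliedL1Block, and Chunk.Encode additionally appends an 8-byte lastAppliedL1Block and a 32-byte l1BlockRangeHash after the L2 transaction payload. Below is a minimal Go sketch of decoding one BlockContext under that layout, assuming the standard encoding/binary and fmt packages; all multi-byte fields are big-endian.

    // Illustrative decoder for one 68-byte BlockContext; sketch only.
    type blockContext struct {
        Number             uint64 // bytes 0..8
        Timestamp          uint64 // bytes 8..16
        BaseFee            []byte // bytes 16..48 (uint256, kept as raw bytes here)
        GasLimit           uint64 // bytes 48..56
        NumTransactions    uint16 // bytes 56..58
        NumL1Messages      uint16 // bytes 58..60
        LastAppliedL1Block uint64 // bytes 60..68, added by this series
    }

    func decodeBlockContext(b []byte) (*blockContext, error) {
        if len(b) != 68 {
            return nil, fmt.Errorf("block context must be 68 bytes, got %d", len(b))
        }
        return &blockContext{
            Number:             binary.BigEndian.Uint64(b[0:8]),
            Timestamp:          binary.BigEndian.Uint64(b[8:16]),
            BaseFee:            append([]byte(nil), b[16:48]...),
            GasLimit:           binary.BigEndian.Uint64(b[48:56]),
            NumTransactions:    binary.BigEndian.Uint16(b[56:58]),
            NumL1Messages:      binary.BigEndian.Uint16(b[58:60]),
            LastAppliedL1Block: binary.BigEndian.Uint64(b[60:68]),
        }, nil
    }
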
@@ -136,7 +136,7 @@ func (c *Chunk) EstimateL1CommitGas() uint64 { numBlocks := uint64(len(c.Blocks)) totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk + totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * 68 // numBlocks of BlockContext in chunk totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas diff --git a/common/types/message/message.go b/common/types/message/message.go index 60fa01ca8b..44f1bb7be1 100644 --- a/common/types/message/message.go +++ b/common/types/message/message.go @@ -9,6 +9,7 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common/hexutil" + "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/rlp" ) @@ -276,6 +277,12 @@ type ChunkProof struct { GitVersion string `json:"git_version,omitempty"` } +type ChunkTrace struct { + BlockTraces []*types.BlockTrace `json:"block_traces"` + PrevLastAppliedL1Block uint64 `json:"prev_last_applied_l1_block"` + L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` +} + // BatchProof includes the proof info that are required for batch verification and rollup. type BatchProof struct { Proof []byte `json:"proof"` diff --git a/coordinator/internal/orm/l2_block.go b/coordinator/internal/orm/l2_block.go index 26b3c4adb5..9c755679b1 100644 --- a/coordinator/internal/orm/l2_block.go +++ b/coordinator/internal/orm/l2_block.go @@ -83,6 +83,8 @@ func (o *L2Block) GetL2BlocksByChunkHash(ctx context.Context, chunkHash string) return nil, fmt.Errorf("L2Block.GetL2BlocksByChunkHash error: %w, chunk hash: %v", err, chunkHash) } + wrappedBlock.LastAppliedL1Block = v.LastAppliedL1Block + wrappedBlocks = append(wrappedBlocks, &wrappedBlock) } diff --git a/prover/core/prover.go b/prover/core/prover.go index f34e3dba75..ebccb4eddd 100644 --- a/prover/core/prover.go +++ b/prover/core/prover.go @@ -17,7 +17,6 @@ import ( "path/filepath" "unsafe" - "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/log" @@ -108,19 +107,17 @@ func (p *ProverCore) ProveBatch(taskID string, chunkInfos []*message.ChunkInfo, // ProveChunk call rust ffi to generate chunk proof. 
func (p *ProverCore) ProveChunk( taskID string, - traces []*types.BlockTrace, - prevLastAppliedL1Block uint64, - l1BlockRangeHash common.Hash, + chunkTrace *message.ChunkTrace, ) (*message.ChunkProof, error) { if p.cfg.ProofType != message.ProofTypeChunk { return nil, fmt.Errorf("prover is not a chunk-prover (type: %v), but is trying to prove a chunk", p.cfg.ProofType) } - tracesByt, err := json.Marshal(traces) + chunkTraceByt, err := json.Marshal(chunkTrace) if err != nil { return nil, err } - proofByt, err := p.proveChunk(tracesByt, prevLastAppliedL1Block, l1BlockRangeHash.Bytes()) + proofByt, err := p.proveChunk(chunkTraceByt) if err != nil { return nil, err } @@ -211,12 +208,9 @@ func (p *ProverCore) proveBatch(chunkInfosByt []byte, chunkProofsByt []byte) ([] return result.Message, nil } -func (p *ProverCore) proveChunk(tracesByt []byte, prevLastAppliedL1Block uint64, l1BlockRangeHash []byte) ([]byte, error) { - tracesStr := C.CString(string(tracesByt)) - defer C.free(unsafe.Pointer(tracesStr)) - - l1BlockRangeHashStr := C.CString(string(l1BlockRangeHash)) - defer C.free(unsafe.Pointer(l1BlockRangeHashStr)) +func (p *ProverCore) proveChunk(chunkTraceByt []byte) ([]byte, error) { + chunkTraceBytStr := C.CString(string(chunkTraceByt)) + defer C.free(unsafe.Pointer(chunkTraceBytStr)) log.Info("Start to create chunk proof ...") cProof := C.gen_chunk_proof(tracesStr, C.uint64_t(prevLastAppliedL1Block), l1BlockRangeHashStr) diff --git a/prover/prover.go b/prover/prover.go index 2e625417eb..bd8563f5ed 100644 --- a/prover/prover.go +++ b/prover/prover.go @@ -22,6 +22,7 @@ import ( "scroll-tech/prover/store" putils "scroll-tech/prover/utils" + "scroll-tech/common/types" "scroll-tech/common/types/message" "scroll-tech/common/utils" ) @@ -288,11 +289,14 @@ func (r *Prover) proveChunk(task *store.ProvingTask) (*message.ChunkProof, error if err != nil { return nil, fmt.Errorf("get traces from eth node failed, block hashes: %v, err: %v", task.Task.ChunkTaskDetail.BlockHashes, err) } + chunkTrace := message.ChunkTrace{ + BlockTraces: traces, + PrevLastAppliedL1Block: task.Task.ChunkTaskDetail.PrevLastAppliedL1Block, + L1BlockRangeHash: task.Task.ChunkTaskDetail.L1BlockRangeHash, + } return r.proverCore.ProveChunk( task.Task.ID, - traces, - task.Task.ChunkTaskDetail.PrevLastAppliedL1Block, - task.Task.ChunkTaskDetail.L1BlockRangeHash, + &chunkTrace, ) } From 18b18e021029b5caed5fe60fdcd82a0c2689085a Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Fri, 27 Oct 2023 10:51:39 +0300 Subject: [PATCH 03/59] feat: add L1Blocks contract --- contracts/src/L2/IL1Blocks.sol | 26 +++++++++++++ contracts/src/L2/L1Blocks.sol | 70 ++++++++++++++++++++++++++++++++++ 2 files changed, 96 insertions(+) create mode 100644 contracts/src/L2/IL1Blocks.sol create mode 100644 contracts/src/L2/L1Blocks.sol diff --git a/contracts/src/L2/IL1Blocks.sol b/contracts/src/L2/IL1Blocks.sol new file mode 100644 index 0000000000..30c30c1113 --- /dev/null +++ b/contracts/src/L2/IL1Blocks.sol @@ -0,0 +1,26 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.16; + +interface IL1Blocks { + /** + * @dev Gets the l1 block hash for a given its block number + * @param _number The l1 block number + * @return hash_ The l1 block hash for the provided block number + */ + function l1Blockhash(uint256 _number) external view returns (bytes32 hash_); + + /** + * @dev Gets the latest l1 block hash applied by the sequencer + * @notice This does not mean that this is the latest L1 block number in the + * L1 blockchain, but rather the last 
item in the block hashes array + * @return hash_ The latest l1 block hash from the block hashes array + */ + function latestBlockhash() external view returns (bytes32 hash_); + + /** + * @dev Appends an array of block hashes to the block hashes array + * @param _blocks The array of new block hashes + */ + function appendBlockhashes(bytes32[] calldata _blocks) external; +} \ No newline at end of file diff --git a/contracts/src/L2/L1Blocks.sol b/contracts/src/L2/L1Blocks.sol new file mode 100644 index 0000000000..a422d5bf72 --- /dev/null +++ b/contracts/src/L2/L1Blocks.sol @@ -0,0 +1,70 @@ +// SPDX-License-Identifier: MIT + +pragma solidity =0.8.16; + + +/// @title L1Blocks + +contract L1Blocks { + /// @notice The max count of block hashes to store. + uint16 public constant BLOCK_HASHES_SIZE = 65536; + + /// @notice The latest L1 block number known by the L2 system. + uint64 public lastAppliedL1Block; + + /// @notice Storage slot with the address of the current block hashes offset. + /// @dev This is the keccak-256 hash of "l1blocks.block_hashes_storage_offset". + bytes32 private constant BLOCK_HASHES_STORAGE_OFFSET = + 0x46b6ca24459c6768b3d8d5d90e9189b00e3ebb5fe38fb16cb9819816d9fe1c2d; + + modifier onlySequencer() { + require(msg.sender == address(0), "L1Blocks: caller is not the sequencer"); + _; + } + + constructor(uint64 _firstAppliedL1Block) { + // The first applied L1 block number. + lastAppliedL1Block = _firstAppliedL1Block - 1; + } + + function l1Blockhash(uint256 _number) external view returns (bytes32 hash_) { + uint64 lastAppliedL1Block_ = lastAppliedL1Block; + + /// @dev It handles the case where the block is in the future. + require(_number <= lastAppliedL1Block_, "L1Blocks: hash number out of bounds"); + + /// @dev It handles the case where the block is no longer in the ring buffer. + require(lastAppliedL1Block_ - _number < BLOCK_HASHES_SIZE, "L1Blocks: hash number out of bounds"); + + assembly { + hash_ := sload(add(BLOCK_HASHES_STORAGE_OFFSET, mod(_number, BLOCK_HASHES_SIZE))) + } + + /// @dev The zero hash means the block hash is not yet set. 
+ require(hash_ != bytes32(0), "L1Blocks: hash number out of bounds"); + } + + function latestBlockhash() external view returns (bytes32 hash_) { + return l1Blockhash(lastAppliedL1Block); + } + + function appendBlockhashes(bytes32[] calldata _blocks) external onlySequencer { + uint64 lastAppliedL1Block_ = lastAppliedL1Block; + uint256 length = _blocks.length; + + assembly { + for { + let i := 0 + } lt(i, length) { + i := add(i, 1) + } { + lastAppliedL1Block_ := add(lastAppliedL1Block_, 1) + let offset_ := add(BLOCK_HASHES_STORAGE_OFFSET, mod(lastAppliedL1Block_, BLOCK_HASHES_SIZE)) + let hash_ := calldataload(add(0x44, mul(i, 0x20))) + sstore(offset_, hash_) + } + } + + lastAppliedL1Block = lastAppliedL1Block_; + } +} From e20e5ca0330ff28f85e913f263ca05624957e81d Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Fri, 10 Nov 2023 19:51:45 +0200 Subject: [PATCH 04/59] fix: add L1Blocks contract fix and unit tests --- contracts/src/L2/L1Blocks.sol | 9 +-- contracts/src/package.json | 107 ++++++++++++------------ contracts/src/test/L1Blocks.t.sol | 130 ++++++++++++++++++++++++++++++ 3 files changed, 188 insertions(+), 58 deletions(-) create mode 100644 contracts/src/test/L1Blocks.t.sol diff --git a/contracts/src/L2/L1Blocks.sol b/contracts/src/L2/L1Blocks.sol index a422d5bf72..8e4a3e5250 100644 --- a/contracts/src/L2/L1Blocks.sol +++ b/contracts/src/L2/L1Blocks.sol @@ -2,12 +2,11 @@ pragma solidity =0.8.16; - /// @title L1Blocks contract L1Blocks { /// @notice The max count of block hashes to store. - uint16 public constant BLOCK_HASHES_SIZE = 65536; + uint32 public constant BLOCK_HASHES_SIZE = 65536; /// @notice The latest L1 block number known by the L2 system. uint64 public lastAppliedL1Block; @@ -27,7 +26,7 @@ contract L1Blocks { lastAppliedL1Block = _firstAppliedL1Block - 1; } - function l1Blockhash(uint256 _number) external view returns (bytes32 hash_) { + function l1Blockhash(uint256 _number) public view returns (bytes32 hash_) { uint64 lastAppliedL1Block_ = lastAppliedL1Block; /// @dev It handles the case where the block is in the future. 
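For illustration only (not part of the patch): the bounds checks in l1Blockhash above define a sliding retention window over the ring buffer, restated here as a minimal Go sketch. A queried block number must not exceed lastAppliedL1Block and must be within the most recent BLOCK_HASHES_SIZE applied blocks; its hash lives at storage slot BLOCK_HASHES_STORAGE_OFFSET plus number modulo BLOCK_HASHES_SIZE. The function name ringSlot is hypothetical, and the sketch assumes the standard errors package.

    // Sketch only; mirrors the require conditions in L1Blocks.l1Blockhash.
    const blockHashesSize = 65536

    func ringSlot(number, lastApplied uint64) (uint64, error) {
        if number > lastApplied {
            return 0, errors.New("hash number out of bounds: block not applied yet")
        }
        if lastApplied-number >= blockHashesSize {
            return 0, errors.New("hash number out of bounds: evicted from the ring buffer")
        }
        // The hash is stored at BLOCK_HASHES_STORAGE_OFFSET + number%blockHashesSize.
        return number % blockHashesSize, nil
    }
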
@@ -48,9 +47,9 @@ contract L1Blocks { return l1Blockhash(lastAppliedL1Block); } - function appendBlockhashes(bytes32[] calldata _blocks) external onlySequencer { + function appendBlockhashes(bytes32[] calldata _hashes) external onlySequencer { uint64 lastAppliedL1Block_ = lastAppliedL1Block; - uint256 length = _blocks.length; + uint256 length = _hashes.length; assembly { for { diff --git a/contracts/src/package.json b/contracts/src/package.json index 51566006f8..779a648680 100644 --- a/contracts/src/package.json +++ b/contracts/src/package.json @@ -1,54 +1,55 @@ { - "name": "@scroll-tech/contracts", - "description": "A library for interacting with Scroll contracts.", - "version": "0.0.4", - "repository": { - "type": "git", - "url": "https://github.com/scroll-tech/scroll.git" - }, - "files": [ - "L1/gateways", - "L1/rollup/IL1MessageQueue.sol", - "L1/rollup/IL2GasPriceOracle.sol", - "L1/rollup/IScrollChain.sol", - "L1/IL1ScrollMessenger.sol", - "L2/gateways", - "L2/predeploys/IL1BlockContainer.sol", - "L2/predeploys/IL1GasPriceOracle.sol", - "L2/IL2ScrollMessenger.sol", - "interfaces", - "libraries/callbacks", - "libraries/gateway", - "libraries/oracle/IGasOracle.sol", - "libraries/token/IScrollERC20.sol", - "libraries/token/IScrollERC20Extension.sol", - "libraries/token/IScrollERC1155.sol", - "libraries/token/IScrollERC1155Extension.sol", - "libraries/token/IScrollERC721.sol", - "libraries/token/IScrollERC721Extension.sol", - "libraries/token/IScrollStandardERC20.sol", - "libraries/token/IScrollStandardERC20Factory.sol", - "libraries/IScrollMessenger.sol" - ], - "keywords": [ - "solidity", - "ethereum", - "smart", - "contracts", - "layer2", - "l2", - "scroll", - "zkevm", - "zkp", - "bridge", - "erc20", - "erc712", - "erc1155" - ], - "author": "Scroll", - "license": "MIT", - "bugs": { - "url": "https://github.com/scroll-tech/scroll-contracts/issues" - }, - "homepage": "https://scroll.io/" - } + "name": "@scroll-tech/contracts", + "description": "A library for interacting with Scroll contracts.", + "version": "0.0.4", + "repository": { + "type": "git", + "url": "https://github.com/scroll-tech/scroll.git" + }, + "files": [ + "L1/gateways", + "L1/rollup/IL1MessageQueue.sol", + "L1/rollup/IL2GasPriceOracle.sol", + "L1/rollup/IScrollChain.sol", + "L1/IL1ScrollMessenger.sol", + "L2/gateways", + "L2/predeploys/IL1BlockContainer.sol", + "L2/predeploys/IL1GasPriceOracle.sol", + "L2/IL2ScrollMessenger.sol", + "L2/IL1Blocks.sol", + "interfaces", + "libraries/callbacks", + "libraries/gateway", + "libraries/oracle/IGasOracle.sol", + "libraries/token/IScrollERC20.sol", + "libraries/token/IScrollERC20Extension.sol", + "libraries/token/IScrollERC1155.sol", + "libraries/token/IScrollERC1155Extension.sol", + "libraries/token/IScrollERC721.sol", + "libraries/token/IScrollERC721Extension.sol", + "libraries/token/IScrollStandardERC20.sol", + "libraries/token/IScrollStandardERC20Factory.sol", + "libraries/IScrollMessenger.sol" + ], + "keywords": [ + "solidity", + "ethereum", + "smart", + "contracts", + "layer2", + "l2", + "scroll", + "zkevm", + "zkp", + "bridge", + "erc20", + "erc712", + "erc1155" + ], + "author": "Scroll", + "license": "MIT", + "bugs": { + "url": "https://github.com/scroll-tech/scroll-contracts/issues" + }, + "homepage": "https://scroll.io/" +} diff --git a/contracts/src/test/L1Blocks.t.sol b/contracts/src/test/L1Blocks.t.sol new file mode 100644 index 0000000000..1793e4141c --- /dev/null +++ b/contracts/src/test/L1Blocks.t.sol @@ -0,0 +1,130 @@ +// SPDX-License-Identifier: MIT + +pragma 
solidity =0.8.16; + +import {DSTestPlus} from "solmate/test/utils/DSTestPlus.sol"; + +import {L1Blocks} from "../L2/L1Blocks.sol"; + +contract L1BlocksTest is DSTestPlus { + L1Blocks private l1Blocks; + uint32 private blockHashesSize; + uint64 private firstAppliedL1Block = 1; + + function setUp() public { + l1Blocks = new L1Blocks(firstAppliedL1Block); + blockHashesSize = l1Blocks.BLOCK_HASHES_SIZE(); + } + + function testFuzzAppendBlockhashesSingleSuccess(bytes32 _hash) external { + hevm.assume(_hash != bytes32(0)); + bytes32[] memory hashes = new bytes32[](1); + hashes[0] = _hash; + + hevm.startPrank(address(0)); + l1Blocks.appendBlockhashes(hashes); + hevm.stopPrank(); + + assertEq(l1Blocks.latestBlockhash(), _hash); + assertEq(l1Blocks.l1Blockhash(l1Blocks.lastAppliedL1Block()), hashes[0]); + } + + function testFuzzAppendBlockhashesManySuccess(bytes32[] memory hashes) external { + uint256 size = hashes.length; + hevm.assume(size > 0); + + for (uint256 i = 0; i < size; i++) { + if (hashes[i] == bytes32(0)) { + hashes[i] = keccak256(abi.encodePacked(i)); + } + } + + hevm.startPrank(address(0)); + l1Blocks.appendBlockhashes(hashes); + hevm.stopPrank(); + + uint256 lastAppliedL1Block = l1Blocks.lastAppliedL1Block(); + + for (uint256 i = 0; i < size; i++) { + assertEq(l1Blocks.l1Blockhash(lastAppliedL1Block - size + 1 + i), hashes[i]); + } + assertEq(l1Blocks.latestBlockhash(), hashes[size - 1]); + } + + function testFuzzGetL1BlockHashLowerBoundFail(uint256 lowerBound) external { + lowerBound = lowerBound % uint256(firstAppliedL1Block); + + bytes32[] memory hashes = new bytes32[](1); + hashes[0] = keccak256(abi.encodePacked(lowerBound)); + + hevm.startPrank(address(0)); + l1Blocks.appendBlockhashes(hashes); + hevm.stopPrank(); + + assertEq(l1Blocks.latestBlockhash(), hashes[0]); + + hevm.expectRevert("L1Blocks: hash number out of bounds"); + l1Blocks.l1Blockhash(lowerBound); + } + + function testFuzzGetL1BlockHashUpperBoundFail(uint64 upperBound) external { + uint256 lastAppliedL1Block = l1Blocks.lastAppliedL1Block(); + hevm.assume(upperBound > lastAppliedL1Block + 1); + + bytes32[] memory hashes = new bytes32[](1); + hashes[0] = keccak256(abi.encodePacked(upperBound)); + + hevm.startPrank(address(0)); + l1Blocks.appendBlockhashes(hashes); + hevm.stopPrank(); + + assertEq(l1Blocks.latestBlockhash(), hashes[0]); + + hevm.expectRevert("L1Blocks: hash number out of bounds"); + l1Blocks.l1Blockhash(upperBound); + } + + function testFuzzAppendBlockhashesNonSequencerFail(address nonSequencer) external { + hevm.assume(nonSequencer != address(0)); + + bytes32[] memory hashes = new bytes32[](1); + hashes[0] = keccak256(abi.encodePacked("test")); + + hevm.startPrank(nonSequencer); + hevm.expectRevert("L1Blocks: caller is not the sequencer"); + l1Blocks.appendBlockhashes(hashes); + hevm.stopPrank(); + } + + function testGetL1BlockHashOverwrittenRingMapSuccess() external { + hevm.startPrank(address(0)); + + uint64 lowerBound = 0; + uint8 times = 3; + bytes32[] memory hashes = new bytes32[](1); + bytes32 testHash = keccak256(abi.encodePacked("test")); + + for (uint64 i = 1; i <= uint256(times) * blockHashesSize + (times - 1); i++) { + hashes[0] = bytes32(uint256(testHash) + i); + l1Blocks.appendBlockhashes(hashes); + + assertEq(l1Blocks.latestBlockhash(), hashes[0]); + + if (i % blockHashesSize == 0) { + lowerBound = i - blockHashesSize + 1; + + hevm.expectRevert("L1Blocks: hash number out of bounds"); + l1Blocks.l1Blockhash(lowerBound - 1); + + for (uint64 k = lowerBound; k < i + 1; k++) { + 
assertEq(l1Blocks.l1Blockhash(k), bytes32(uint256(testHash) + k)); + } + + hevm.expectRevert("L1Blocks: hash number out of bounds"); + l1Blocks.l1Blockhash(i + 1); + } + } + + hevm.stopPrank(); + } +} From fd479407f8709616bcd2e102e7a8a79c24dc78f9 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Mon, 13 Nov 2023 16:40:59 +0200 Subject: [PATCH 05/59] fix: add fix --- coordinator/internal/orm/chunk.go | 1 + .../migrations/00015_add_l1_block_hashes_fields_to_chunk.sql | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/coordinator/internal/orm/chunk.go b/coordinator/internal/orm/chunk.go index 88397ed103..cfc5d0b5fc 100644 --- a/coordinator/internal/orm/chunk.go +++ b/coordinator/internal/orm/chunk.go @@ -127,6 +127,7 @@ func (o *Chunk) GetChunksByBatchHash(ctx context.Context, batchHash string) ([]* return chunks, nil } +// GetChunkByHash retrieves the first chunk associated with a specific chunk hash. func (o *Chunk) GetChunkByHash(ctx context.Context, hash string) (*Chunk, error) { db := o.db.WithContext(ctx) db = db.Model(&Chunk{}) diff --git a/database/migrate/migrations/00015_add_l1_block_hashes_fields_to_chunk.sql b/database/migrate/migrations/00015_add_l1_block_hashes_fields_to_chunk.sql index 0bbea4b890..5231a73a59 100644 --- a/database/migrate/migrations/00015_add_l1_block_hashes_fields_to_chunk.sql +++ b/database/migrate/migrations/00015_add_l1_block_hashes_fields_to_chunk.sql @@ -11,7 +11,7 @@ ALTER TABLE chunk -- +goose StatementBegin ALTER TABLE IF EXISTS chunk -DROP COLUMN last_applied_l1_block; +DROP COLUMN last_applied_l1_block, DROP COLUMN l1_block_range_hash; -- +goose StatementEnd From dc9086251bf392cc4424b8512d6854e0c3f04ced Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Mon, 13 Nov 2023 16:53:51 +0200 Subject: [PATCH 06/59] test: fix database test --- database/migrate/migrate_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/database/migrate/migrate_test.go b/database/migrate/migrate_test.go index df449a927c..d17ea443cb 100644 --- a/database/migrate/migrate_test.go +++ b/database/migrate/migrate_test.go @@ -63,7 +63,7 @@ func testResetDB(t *testing.T) { cur, err := Current(pgDB.DB) assert.NoError(t, err) // total number of tables. 
- assert.Equal(t, 14, int(cur)) + assert.Equal(t, 16, int(cur)) } func testMigrate(t *testing.T) { From 151631e30be34eb48f96e5e724d29e09644fbacf Mon Sep 17 00:00:00 2001 From: failfmi Date: Tue, 14 Nov 2023 13:52:13 +0200 Subject: [PATCH 07/59] script: L1Blocks deployment --- contracts/.env.example | 2 ++ contracts/scripts/README.md | 6 +++++ .../foundry/DeployL2L1BlocksContract.s.sol | 25 +++++++++++++++++++ 3 files changed, 33 insertions(+) create mode 100644 contracts/scripts/foundry/DeployL2L1BlocksContract.s.sol diff --git a/contracts/.env.example b/contracts/.env.example index 49e95ed1d3..4dd1ba832d 100644 --- a/contracts/.env.example +++ b/contracts/.env.example @@ -10,3 +10,5 @@ L1_DEPLOYER_PRIVATE_KEY=0xabc123abc123abc123abc123abc123abc123abc123abc123abc123 L2_DEPLOYER_PRIVATE_KEY=0xabc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc1 CHAIN_ID_L2="5343541" + +L1_BLOCKS_FIRST_APPLIED="1" diff --git a/contracts/scripts/README.md b/contracts/scripts/README.md index b6cc671826..396d53246a 100644 --- a/contracts/scripts/README.md +++ b/contracts/scripts/README.md @@ -84,6 +84,7 @@ SCROLL_L2_RPC="http://localhost:8545" L1_DEPLOYER_PRIVATE_KEY="0x0000000000000000000000000000000000000000000000000000000000000001" L2_DEPLOYER_PRIVATE_KEY="0x0000000000000000000000000000000000000000000000000000000000000002" L1_ROLLUP_OPERATOR_ADDR="0x1111111111111111111111111111111111111111" +L1_BLOCKS_FIRST_APPLIED="1" $ source .env @@ -101,4 +102,9 @@ $ source .env.l2_addresses # Initialize contracts $ forge script scripts/foundry/InitializeL1BridgeContracts.s.sol:InitializeL1BridgeContracts --rpc-url $SCROLL_L1_RPC --broadcast $ forge script scripts/foundry/InitializeL2BridgeContracts.s.sol:InitializeL2BridgeContracts --rpc-url $SCROLL_L2_RPC --broadcast + +# Deploy L2 L1Blocks contract +$ OUTPUT=$(forge script scripts/foundry/DeployL2L1BlocksContract.s.sol:DeployL2L1BlocksContract --rpc-url $SCROLL_L2_RPC --broadcast); echo $OUTPUT +$ echo "$OUTPUT" | grep -Eo "(L2)_.*" > .env.l2_addresses +$ source .env.l2_addresses ``` diff --git a/contracts/scripts/foundry/DeployL2L1BlocksContract.s.sol b/contracts/scripts/foundry/DeployL2L1BlocksContract.s.sol new file mode 100644 index 0000000000..f4b06a70cd --- /dev/null +++ b/contracts/scripts/foundry/DeployL2L1BlocksContract.s.sol @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.8.10; + +import {Script} from "forge-std/Script.sol"; +import {console} from "forge-std/console.sol"; + +import {L1Blocks} from "../../src/L2/L1Blocks.sol"; + +contract DeployL2L1BlocksContract is Script { + uint256 L2_DEPLOYER_PRIVATE_KEY = vm.envUint("L2_DEPLOYER_PRIVATE_KEY"); + uint64 L1_BLOCKS_FIRST_APPLIED = uint64(vm.envUint("L1_BLOCKS_FIRST_APPLIED")); + + function run() external { + vm.startBroadcast(L2_DEPLOYER_PRIVATE_KEY); + + L1Blocks l1Blocks = new L1Blocks(L1_BLOCKS_FIRST_APPLIED); + logAddress("L2_L1BLOCKS_ADDR", address(l1Blocks)); + + vm.stopBroadcast(); + } + + function logAddress(string memory name, address addr) internal view { + console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr))))); + } +} From fe21b54d5c8b62a9251631392c8d7816493a908c Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Tue, 14 Nov 2023 14:43:20 +0200 Subject: [PATCH 08/59] fix: add update chunk prover task --- .../internal/logic/provertask/chunk_prover_task.go | 11 ++++++++--- coordinator/internal/orm/chunk.go | 7 ++++++- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git 
a/coordinator/internal/logic/provertask/chunk_prover_task.go b/coordinator/internal/logic/provertask/chunk_prover_task.go index 4343c1486f..8314881b46 100644 --- a/coordinator/internal/logic/provertask/chunk_prover_task.go +++ b/coordinator/internal/logic/provertask/chunk_prover_task.go @@ -160,9 +160,14 @@ func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.Prove } taskDetail := message.ChunkTaskDetail{ - BlockHashes: blockHashes, - PrevLastAppliedL1Block: parentChunk.LastAppliedL1Block, - L1BlockRangeHash: common.HexToHash(chunk.L1BlockRangeHash), + BlockHashes: blockHashes, + PrevLastAppliedL1Block: func() uint64 { + if parentChunk != nil { + return parentChunk.LastAppliedL1Block + } + return 0 + }(), + L1BlockRangeHash: common.HexToHash(chunk.L1BlockRangeHash), } taskDataBytes, err := json.Marshal(taskDetail) if err != nil { diff --git a/coordinator/internal/orm/chunk.go b/coordinator/internal/orm/chunk.go index cfc5d0b5fc..5c40e2d376 100644 --- a/coordinator/internal/orm/chunk.go +++ b/coordinator/internal/orm/chunk.go @@ -134,7 +134,12 @@ func (o *Chunk) GetChunkByHash(ctx context.Context, hash string) (*Chunk, error) db = db.Where("hash", hash) var chunk Chunk - if err := db.First(&chunk).Error; err != nil { + err := db.First(&chunk).Error + if err != nil && errors.Is(err, gorm.ErrRecordNotFound) { + return nil, nil + } + + if err != nil { return nil, fmt.Errorf("Chunk.GetChunkByHash error: %w", err) } return &chunk, nil From aa4005a3d4ee385dd3c76950c849b529358d3471 Mon Sep 17 00:00:00 2001 From: reo101 Date: Tue, 14 Nov 2023 15:12:44 +0200 Subject: [PATCH 09/59] feat(contracts): implement and test `L1ViewOracle` --- contracts/src/L1/IL1ViewOracle.sol | 13 ++++++ contracts/src/L1/L1ViewOracle.sol | 28 +++++++++++ contracts/src/test/L1ViewOracle.t.sol | 67 +++++++++++++++++++++++++++ 3 files changed, 108 insertions(+) create mode 100644 contracts/src/L1/IL1ViewOracle.sol create mode 100644 contracts/src/L1/L1ViewOracle.sol create mode 100644 contracts/src/test/L1ViewOracle.t.sol diff --git a/contracts/src/L1/IL1ViewOracle.sol b/contracts/src/L1/IL1ViewOracle.sol new file mode 100644 index 0000000000..6e5799afca --- /dev/null +++ b/contracts/src/L1/IL1ViewOracle.sol @@ -0,0 +1,13 @@ +// SPDX-License-Identifier: MIT + +pragma solidity =0.8.16; + +interface IL1ViewOracle { + /** + * @dev Returns hash of all the blockhashes in the range + * @param from The block number to get the hash of blockhashes after. + * @param to The block number to get the hash of blockhashes up to. + * @return hash The keccak hash of all blockhashes in the provided range + */ + function blockRangeHash(uint256 from, uint256 to) external view returns (bytes32 hash); +} diff --git a/contracts/src/L1/L1ViewOracle.sol b/contracts/src/L1/L1ViewOracle.sol new file mode 100644 index 0000000000..6bdcf280f9 --- /dev/null +++ b/contracts/src/L1/L1ViewOracle.sol @@ -0,0 +1,28 @@ +// SPDX-License-Identifier: MIT + +pragma solidity =0.8.16; + +import {IL1ViewOracle} from "./IL1ViewOracle.sol"; + +contract L1ViewOracle is IL1ViewOracle { + /** + * @dev Returns hash of all the blockhashes in the range + * @param from The block number to get the hash of blockhashes after. + * @param to The block number to get the hash of blockhashes up to. 
+ * @return hash The keccak hash of all blockhashes in the provided range + */ + function blockRangeHash(uint256 from, uint256 to) external view returns (bytes32 hash) { + require(to >= from, "End must be greater than or equal to start"); + require(to < block.number, "Block range exceeds current block"); + + hash = 0; + + for (uint256 i = from; i <= to; i++) { + bytes32 blockHash = blockhash(i); + + require(blockHash != 0, "Blockhash not available"); + + hash = keccak256(abi.encodePacked(hash, blockHash)); + } + } +} diff --git a/contracts/src/test/L1ViewOracle.t.sol b/contracts/src/test/L1ViewOracle.t.sol new file mode 100644 index 0000000000..630dadc220 --- /dev/null +++ b/contracts/src/test/L1ViewOracle.t.sol @@ -0,0 +1,67 @@ +// SPDX-License-Identifier: MIT + +pragma solidity =0.8.16; + +import {DSTestPlus} from "solmate/test/utils/DSTestPlus.sol"; + +import {L1ViewOracle} from "../L1/L1ViewOracle.sol"; + +contract L1ViewOracleTest is DSTestPlus { + L1ViewOracle private oracle; + + function setUp() public { + oracle = new L1ViewOracle(); + } + + function testTooOldBlocks() external { + hevm.expectRevert("Blockhash not available"); + + hevm.roll(300); + + uint256 from = block.number - 260; + uint256 to = from + 5; + + bytes32 hash = oracle.blockRangeHash(from, to); + } + + function testTooNewBlocks() external { + hevm.expectRevert("Block range exceeds current block"); + + hevm.roll(10); + + uint256 from = block.number - 5; + uint256 to = block.number + 5; + + bytes32 hash = oracle.blockRangeHash(from, to); + } + + function testInvalidRange() external { + hevm.expectRevert("End must be greater than or equal to start"); + + uint256 from = 200; + uint256 to = 100; + + bytes32 hash = oracle.blockRangeHash(from, to); + } + + function testCorrectness() external { + hevm.roll(150); + + uint256 from = 15; + uint256 to = 48; + + bytes32 expectedHash = 0; + + for (uint256 i = from; i <= to; i++) { + bytes32 blockHash = blockhash(i); + + require(blockHash != 0, "Blockhash not available"); + + expectedHash = keccak256(abi.encodePacked(expectedHash, blockHash)); + } + + bytes32 gotHash = oracle.blockRangeHash(from, to); + + assertEq(expectedHash, gotHash); + } +} From 68eff419ea751ef3ca7a360b9d4ffdf658dacf57 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Tue, 14 Nov 2023 15:45:04 +0200 Subject: [PATCH 10/59] test: fix tests --- common/types/block.go | 2 +- common/types/chunk.go | 2 +- common/types/chunk_test.go | 26 +++++++++---------- coordinator/internal/orm/l2_block.go | 2 +- .../controller/relayer/l2_relayer_test.go | 4 +-- .../controller/watcher/batch_proposer_test.go | 4 +-- .../controller/watcher/chunk_proposer_test.go | 2 +- rollup/internal/orm/chunk.go | 18 ++++++------- rollup/internal/orm/orm_test.go | 4 +-- rollup/tests/rollup_test.go | 2 +- 10 files changed, 33 insertions(+), 33 deletions(-) diff --git a/common/types/block.go b/common/types/block.go index b98cd9c0ec..3936fd33d2 100644 --- a/common/types/block.go +++ b/common/types/block.go @@ -34,7 +34,7 @@ type WrappedBlock struct { Transactions []*types.TransactionData `json:"transactions"` WithdrawRoot common.Hash `json:"withdraw_trie_root,omitempty"` RowConsumption *types.RowConsumption `json:"row_consumption"` - LastAppliedL1Block uint64 `json:"latest_applied_l1_block"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block"` txPayloadLengthCache map[string]uint64 } diff --git a/common/types/chunk.go b/common/types/chunk.go index 4614df565c..dcff23e184 100644 --- a/common/types/chunk.go +++ b/common/types/chunk.go @@ 
-15,7 +15,7 @@ import ( // Chunk contains blocks to be encoded type Chunk struct { Blocks []*WrappedBlock `json:"blocks"` - LastAppliedL1Block uint64 `json:"latest_applied_l1_block"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block"` L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` } diff --git a/common/types/chunk_test.go b/common/types/chunk_test.go index 8d7e27f08b..2fc8f5f7d4 100644 --- a/common/types/chunk_test.go +++ b/common/types/chunk_test.go @@ -38,7 +38,7 @@ func TestChunkEncode(t *testing.T) { wrappedBlock := &WrappedBlock{} assert.NoError(t, json.Unmarshal(templateBlockTrace, wrappedBlock)) assert.Equal(t, uint64(0), wrappedBlock.NumL1Messages(0)) - assert.Equal(t, uint64(298), wrappedBlock.EstimateL1CommitCalldataSize()) + assert.Equal(t, uint64(306), wrappedBlock.EstimateL1CommitCalldataSize()) assert.Equal(t, uint64(2), wrappedBlock.NumL2Transactions()) chunk = &Chunk{ Blocks: []*WrappedBlock{ @@ -46,12 +46,12 @@ func TestChunkEncode(t *testing.T) { }, } assert.Equal(t, uint64(0), chunk.NumL1Messages(0)) - assert.Equal(t, uint64(6042), chunk.EstimateL1CommitGas()) + assert.Equal(t, uint64(6298), chunk.EstimateL1CommitGas()) bytes, err = chunk.Encode(0) hexString := hex.EncodeToString(bytes) assert.NoError(t, err) - assert.Equal(t, 299, len(bytes)) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000000000000355418d1e81840002000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", hexString) + assert.Equal(t, 339, len(bytes)) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000000000000355418d1e818400020000000000000000000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10000000000000000000000000000000000000000000000000000000000000000", hexString) // Test case 4: when the chunk contains one block with 1 L1MsgTx templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json") @@ -60,7 +60,7 @@ func TestChunkEncode(t *testing.T) { wrappedBlock2 := &WrappedBlock{} assert.NoError(t, json.Unmarshal(templateBlockTrace2, wrappedBlock2)) assert.Equal(t, uint64(11), wrappedBlock2.NumL1Messages(0)) // 0..=9 skipped, 10 included - assert.Equal(t, uint64(96), wrappedBlock2.EstimateL1CommitCalldataSize()) + assert.Equal(t, uint64(104), wrappedBlock2.EstimateL1CommitCalldataSize()) assert.Equal(t, uint64(1), wrappedBlock2.NumL2Transactions()) chunk = &Chunk{ Blocks: []*WrappedBlock{ @@ -68,12 +68,12 @@ func TestChunkEncode(t *testing.T) { }, } assert.Equal(t, uint64(11), chunk.NumL1Messages(0)) - assert.Equal(t, uint64(5329), chunk.EstimateL1CommitGas()) + assert.Equal(t, uint64(5585), 
chunk.EstimateL1CommitGas()) bytes, err = chunk.Encode(0) hexString = hex.EncodeToString(bytes) assert.NoError(t, err) - assert.Equal(t, 97, len(bytes)) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b00000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", hexString) + assert.Equal(t, 137, len(bytes)) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b000000000000000000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080800000000000000000000000000000000000000000000000000000000000000000", hexString) // Test case 5: when the chunk contains two blocks each with 1 L1MsgTx // TODO: revise this test, we cannot reuse the same L1MsgTx twice @@ -84,12 +84,12 @@ func TestChunkEncode(t *testing.T) { }, } assert.Equal(t, uint64(11), chunk.NumL1Messages(0)) - assert.Equal(t, uint64(10612), chunk.EstimateL1CommitGas()) + assert.Equal(t, uint64(11124), chunk.EstimateL1CommitGas()) bytes, err = chunk.Encode(0) hexString = hex.EncodeToString(bytes) assert.NoError(t, err) - assert.Equal(t, 193, len(bytes)) - assert.Equal(t, "02000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a12000001000000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", hexString) + assert.Equal(t, 241, len(bytes)) + assert.Equal(t, "02000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b0000000000000000000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a120000010000000000000000000000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080800000000000000000000000000000000000000000000000000000000000000000", hexString) } func TestChunkHash(t *testing.T) { @@ -129,7 +129,7 @@ func TestChunkHash(t *testing.T) { } hash, err = chunk.Hash(0) assert.NoError(t, err) - assert.Equal(t, "0xaa9e494f72bc6965857856f0fae6916f27b2a6591c714a573b2fab46df03b8ae", hash.Hex()) + assert.Equal(t, "0x8d71fbbc486f745ff46ca5d1c0f18ab1f1a1b488e88708034b57d6a1d7fb04ed", hash.Hex()) // Test case 4: successfully hashing a chunk on two blocks each with L1 and L2 txs templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json") @@ -144,7 +144,7 @@ func TestChunkHash(t *testing.T) { } hash, err = chunk.Hash(0) assert.NoError(t, err) - assert.Equal(t, "0x2eb7dd63bf8fc29a0f8c10d16c2ae6f9da446907c79d50f5c164d30dc8526b60", hash.Hex()) + assert.Equal(t, "0x6a47de75ba15fdefa5c8f63a43715f633a0f9559cf07e8bd164ac0cae80300cb", hash.Hex()) } func TestErrorPaths(t *testing.T) { diff --git a/coordinator/internal/orm/l2_block.go b/coordinator/internal/orm/l2_block.go index 9c755679b1..dcbe20f4b1 100644 --- a/coordinator/internal/orm/l2_block.go +++ b/coordinator/internal/orm/l2_block.go @@ -26,7 +26,7 @@ type L2Block struct { Transactions string `json:"transactions" gorm:"transactions"` WithdrawRoot string `json:"withdraw_root" gorm:"withdraw_root"` StateRoot string `json:"state_root" gorm:"state_root"` - LastAppliedL1Block uint64 `json:"latest_applied_l1_block" 
gorm:"latest_applied_l1_block"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block" gorm:"last_applied_l1_block"` TxNum uint32 `json:"tx_num" gorm:"tx_num"` GasUsed uint64 `json:"gas_used" gorm:"gas_used"` BlockTimestamp uint64 `json:"block_timestamp" gorm:"block_timestamp"` diff --git a/rollup/internal/controller/relayer/l2_relayer_test.go b/rollup/internal/controller/relayer/l2_relayer_test.go index 5b46be7946..030b495749 100644 --- a/rollup/internal/controller/relayer/l2_relayer_test.go +++ b/rollup/internal/controller/relayer/l2_relayer_test.go @@ -55,9 +55,9 @@ func testL2RelayerProcessPendingBatches(t *testing.T) { err = l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2}) assert.NoError(t, err) chunkOrm := orm.NewChunk(db) - dbChunk1, err := chunkOrm.InsertChunk(context.Background(), chunk1) + dbChunk1, err := chunkOrm.InsertChunk(context.Background(), nil, chunk1) assert.NoError(t, err) - dbChunk2, err := chunkOrm.InsertChunk(context.Background(), chunk2) + dbChunk2, err := chunkOrm.InsertChunk(context.Background(), dbChunk1, chunk2) assert.NoError(t, err) batchMeta := &types.BatchMeta{ StartChunkIndex: 0, diff --git a/rollup/internal/controller/watcher/batch_proposer_test.go b/rollup/internal/controller/watcher/batch_proposer_test.go index 580d1bdc38..624b486e89 100644 --- a/rollup/internal/controller/watcher/batch_proposer_test.go +++ b/rollup/internal/controller/watcher/batch_proposer_test.go @@ -94,7 +94,7 @@ func testBatchProposerLimits(t *testing.T) { err := l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2}) assert.NoError(t, err) - cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{ + cp := NewChunkProposer(context.Background(), l2Cli, &config.ChunkProposerConfig{ MaxBlockNumPerChunk: 1, MaxTxNumPerChunk: 10000, MaxL1CommitGasPerChunk: 50000000000, @@ -153,7 +153,7 @@ func testBatchCommitGasAndCalldataSizeEstimation(t *testing.T) { err := l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2}) assert.NoError(t, err) - cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{ + cp := NewChunkProposer(context.Background(), l2Cli, &config.ChunkProposerConfig{ MaxBlockNumPerChunk: 1, MaxTxNumPerChunk: 10000, MaxL1CommitGasPerChunk: 50000000000, diff --git a/rollup/internal/controller/watcher/chunk_proposer_test.go b/rollup/internal/controller/watcher/chunk_proposer_test.go index bef35323bf..cb7e3a2f00 100644 --- a/rollup/internal/controller/watcher/chunk_proposer_test.go +++ b/rollup/internal/controller/watcher/chunk_proposer_test.go @@ -152,7 +152,7 @@ func testChunkProposerLimits(t *testing.T) { err := l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2}) assert.NoError(t, err) - cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{ + cp := NewChunkProposer(context.Background(), l2Cli, &config.ChunkProposerConfig{ MaxBlockNumPerChunk: tt.maxBlockNum, MaxTxNumPerChunk: tt.maxTxNum, MaxL1CommitGasPerChunk: tt.maxL1CommitGas, diff --git a/rollup/internal/orm/chunk.go b/rollup/internal/orm/chunk.go index 185ef38e9a..49da23e485 100644 --- a/rollup/internal/orm/chunk.go +++ b/rollup/internal/orm/chunk.go @@ -30,7 +30,7 @@ type Chunk struct { StateRoot string `json:"state_root" gorm:"column:state_root"` ParentChunkStateRoot string `json:"parent_chunk_state_root" gorm:"column:parent_chunk_state_root"` WithdrawRoot string 
`json:"withdraw_root" gorm:"column:withdraw_root"` - LastAppliedL1Block uint64 `json:"latest_applied_l1_block" gorm:"column:latest_applied_l1_block"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block" gorm:"column:last_applied_l1_block"` L1BlockRangeHash string `json:"l1_block_range_hash" gorm:"column:l1_block_range_hash"` // proof @@ -137,7 +137,7 @@ func (o *Chunk) GetChunksGEIndex(ctx context.Context, index uint64, limit int) ( } // InsertChunk inserts a new chunk into the database. -func (o *Chunk) InsertChunk(ctx context.Context, parentChunk *Chunk, chunk *types.Chunk, dbTX ...*gorm.DB) (*Chunk, error) { +func (o *Chunk) InsertChunk(ctx context.Context, parentDbChunk *Chunk, chunk *types.Chunk, dbTX ...*gorm.DB) (*Chunk, error) { if chunk == nil || len(chunk.Blocks) == 0 { return nil, errors.New("invalid args") } @@ -147,14 +147,14 @@ func (o *Chunk) InsertChunk(ctx context.Context, parentChunk *Chunk, chunk *type var parentChunkHash string var parentChunkStateRoot string - // if parentChunk==nil then err==gorm.ErrRecordNotFound, which means there's + // if parentDbChunk==nil then err==gorm.ErrRecordNotFound, which means there's // not chunk record in the db, we then use default empty values for the creating chunk; - // if parentChunk!=nil then err=nil, then we fill the parentChunk-related data into the creating chunk - if parentChunk != nil { - chunkIndex = parentChunk.Index + 1 - totalL1MessagePoppedBefore = parentChunk.TotalL1MessagesPoppedBefore + uint64(parentChunk.TotalL1MessagesPoppedInChunk) - parentChunkHash = parentChunk.Hash - parentChunkStateRoot = parentChunk.StateRoot + // if parentDbChunk!=nil then err=nil, then we fill the parentChunk-related data into the creating chunk + if parentDbChunk != nil { + chunkIndex = parentDbChunk.Index + 1 + totalL1MessagePoppedBefore = parentDbChunk.TotalL1MessagesPoppedBefore + uint64(parentDbChunk.TotalL1MessagesPoppedInChunk) + parentChunkHash = parentDbChunk.Hash + parentChunkStateRoot = parentDbChunk.StateRoot } hash, err := chunk.Hash(totalL1MessagePoppedBefore) diff --git a/rollup/internal/orm/orm_test.go b/rollup/internal/orm/orm_test.go index bdbde4f884..720062404d 100644 --- a/rollup/internal/orm/orm_test.go +++ b/rollup/internal/orm/orm_test.go @@ -175,11 +175,11 @@ func TestChunkOrm(t *testing.T) { assert.NoError(t, err) assert.NoError(t, migrate.ResetDB(sqlDB)) - dbChunk1, err := chunkOrm.InsertChunk(context.Background(), chunk1) + dbChunk1, err := chunkOrm.InsertChunk(context.Background(), nil, chunk1) assert.NoError(t, err) assert.Equal(t, dbChunk1.Hash, chunkHash1.Hex()) - dbChunk2, err := chunkOrm.InsertChunk(context.Background(), chunk2) + dbChunk2, err := chunkOrm.InsertChunk(context.Background(), dbChunk1, chunk2) assert.NoError(t, err) assert.Equal(t, dbChunk2.Hash, chunkHash2.Hex()) diff --git a/rollup/tests/rollup_test.go b/rollup/tests/rollup_test.go index 5eae9d86b1..8501686ba0 100644 --- a/rollup/tests/rollup_test.go +++ b/rollup/tests/rollup_test.go @@ -56,7 +56,7 @@ func testCommitBatchAndFinalizeBatch(t *testing.T) { err = l2BlockOrm.InsertL2Blocks(context.Background(), wrappedBlocks) assert.NoError(t, err) - cp := watcher.NewChunkProposer(context.Background(), &config.ChunkProposerConfig{ + cp := watcher.NewChunkProposer(context.Background(), l2Client, &config.ChunkProposerConfig{ MaxBlockNumPerChunk: 100, MaxTxNumPerChunk: 10000, MaxL1CommitGasPerChunk: 50000000000, From 07e5fed0e59523262ee58bdae3a022c0285943f6 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Wed, 15 Nov 2023 15:31:58 +0200 
Subject: [PATCH 11/59] test: fix tests --- prover/core/mock.go | 3 +-- prover/prover.go | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/prover/core/mock.go b/prover/core/mock.go index 082c52bffc..c8208aa4e1 100644 --- a/prover/core/mock.go +++ b/prover/core/mock.go @@ -6,7 +6,6 @@ import ( "math/big" "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" "scroll-tech/common/types/message" @@ -24,7 +23,7 @@ func NewProverCore(cfg *config.ProverCoreConfig) (*ProverCore, error) { return &ProverCore{cfg: cfg}, nil } -func (p *ProverCore) ProveChunk(taskID string, traces []*types.BlockTrace) (*message.ChunkProof, error) { +func (p *ProverCore) ProveChunk(taskID string, chunkTrace *message.ChunkTrace) (*message.ChunkProof, error) { _empty := common.BigToHash(big.NewInt(0)) return &message.ChunkProof{ StorageTrace: _empty[:], diff --git a/prover/prover.go b/prover/prover.go index bd8563f5ed..aa6ca6619f 100644 --- a/prover/prover.go +++ b/prover/prover.go @@ -22,7 +22,6 @@ import ( "scroll-tech/prover/store" putils "scroll-tech/prover/utils" - "scroll-tech/common/types" "scroll-tech/common/types/message" "scroll-tech/common/utils" ) From 708b7489392c5d0ad7fb910859e15dd63ede9663 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Wed, 15 Nov 2023 15:38:20 +0200 Subject: [PATCH 12/59] test: fix tests --- prover/core/prover_test.go | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/prover/core/prover_test.go b/prover/core/prover_test.go index 12fd1a56b1..c0384657fd 100644 --- a/prover/core/prover_test.go +++ b/prover/core/prover_test.go @@ -11,7 +11,6 @@ import ( "os" "testing" - "github.com/scroll-tech/go-ethereum/core/types" "github.com/stretchr/testify/assert" "scroll-tech/common/types/message" @@ -89,17 +88,17 @@ func TestFFI(t *testing.T) { t.Log("Batch VKs must be equal after proving") } -func readChunkTrace(filePat string, as *assert.Assertions) []*types.BlockTrace { +func readChunkTrace(filePat string, as *assert.Assertions) *message.ChunkTrace { f, err := os.Open(filePat) as.NoError(err) defer as.NoError(f.Close()) byt, err := io.ReadAll(f) as.NoError(err) - trace := &types.BlockTrace{} + trace := &message.ChunkTrace{} as.NoError(json.Unmarshal(byt, trace)) - return []*types.BlockTrace{trace} + return *trace } func readVk(filePat string, as *assert.Assertions) string { From b135dbc8afe6cf372bbb12bb183c22d830460f22 Mon Sep 17 00:00:00 2001 From: failfmi Date: Thu, 16 Nov 2023 08:59:03 +0200 Subject: [PATCH 13/59] script: L1ViewOracle deployment --- contracts/scripts/README.md | 5 ++++ .../scripts/foundry/DeployL1ViewOracle.s.sol | 24 +++++++++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 contracts/scripts/foundry/DeployL1ViewOracle.s.sol diff --git a/contracts/scripts/README.md b/contracts/scripts/README.md index 396d53246a..5570e4ae17 100644 --- a/contracts/scripts/README.md +++ b/contracts/scripts/README.md @@ -107,4 +107,9 @@ $ forge script scripts/foundry/InitializeL2BridgeContracts.s.sol:InitializeL2Bri $ OUTPUT=$(forge script scripts/foundry/DeployL2L1BlocksContract.s.sol:DeployL2L1BlocksContract --rpc-url $SCROLL_L2_RPC --broadcast); echo $OUTPUT $ echo "$OUTPUT" | grep -Eo "(L2)_.*" > .env.l2_addresses $ source .env.l2_addresses + +# Deploy L1 ViewOracle contract +$ OUTPUT=$(forge script scripts/foundry/DeployL1ViewOracle.s.sol:DeployL1ViewOracle --rpc-url $SCROLL_L1_RPC --broadcast); echo $OUTPUT +$ echo "$OUTPUT" | grep -Eo "(L1)_.*" > .env.l1_addresses +$ source .env.l1_addresses 
``` diff --git a/contracts/scripts/foundry/DeployL1ViewOracle.s.sol b/contracts/scripts/foundry/DeployL1ViewOracle.s.sol new file mode 100644 index 0000000000..8e6f892f2a --- /dev/null +++ b/contracts/scripts/foundry/DeployL1ViewOracle.s.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.8.10; + +import {Script} from "forge-std/Script.sol"; +import {console} from "forge-std/console.sol"; + +import {L1ViewOracle} from "../../src/L1/L1ViewOracle.sol"; + +contract DeployL1ViewOracle is Script { + uint256 L1_DEPLOYER_PRIVATE_KEY = vm.envUint("L1_DEPLOYER_PRIVATE_KEY"); + + function run() external { + vm.startBroadcast(L1_DEPLOYER_PRIVATE_KEY); + + L1ViewOracle l1ViewOracle = new L1ViewOracle(); + logAddress("L1_VIEW_ORACLE_ADDR", address(l1ViewOracle)); + + vm.stopBroadcast(); + } + + function logAddress(string memory name, address addr) internal view { + console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr))))); + } +} From 24f90205467ab026a898cdaf6ed6d5f92c2efd49 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Fri, 17 Nov 2023 16:45:54 +0200 Subject: [PATCH 14/59] wip: add get l1 block range hash directly from l1 client for chunk proposer --- rollup/abi/bridge_abi.go | 10 +++ rollup/cmd/rollup_relayer/app/app.go | 9 ++- rollup/conf/config.json | 1 + rollup/internal/config/l1.go | 2 + .../controller/watcher/batch_proposer_test.go | 12 ++-- .../controller/watcher/chunk_proposer.go | 63 ++++++++++++++++--- .../controller/watcher/chunk_proposer_test.go | 6 +- .../controller/watcher/watcher_test.go | 6 ++ rollup/tests/rollup_test.go | 6 +- 9 files changed, 98 insertions(+), 17 deletions(-) diff --git a/rollup/abi/bridge_abi.go b/rollup/abi/bridge_abi.go index fe0947cea9..30e02cf717 100644 --- a/rollup/abi/bridge_abi.go +++ b/rollup/abi/bridge_abi.go @@ -39,6 +39,9 @@ var ( // L2AppendMessageEventSignature = keccak256("AppendMessage(uint256,bytes32)") L2AppendMessageEventSignature common.Hash + + // L1ViewOracleABI + L1ViewOracleABI *abi.ABI ) func init() { @@ -60,6 +63,8 @@ func init() { L2FailedRelayedMessageEventSignature = L2ScrollMessengerABI.Events["FailedRelayedMessage"].ID L2AppendMessageEventSignature = L2MessageQueueABI.Events["AppendMessage"].ID + + L1ViewOracleABI, _ = L1ViewOracleMetaData.GetAbi() } // Generated manually from abigen and only necessary events and mutable calls are kept. 
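The next hunk registers `L1ViewOracleMetaData`, a hand-written ABI containing only `blockRangeHash(uint256,uint256) returns (bytes32)`, which the chunk proposer later packs into a raw `eth_call` against the deployed `L1ViewOracle`. For reference, here is a minimal, self-contained sketch of that call flow using the same `abi`/`ethclient` packages from the scroll-tech go-ethereum fork; the RPC endpoint and oracle address are placeholders, not values from this patch. Note that `UnpackIntoInterface` expects a pointer to the destination value.

```go
package main

import (
	"context"
	"fmt"
	"math/big"
	"strings"

	"github.com/scroll-tech/go-ethereum"
	"github.com/scroll-tech/go-ethereum/accounts/abi"
	"github.com/scroll-tech/go-ethereum/common"
	"github.com/scroll-tech/go-ethereum/ethclient"
)

// Hand-written ABI for L1ViewOracle.blockRangeHash(uint256,uint256) -> bytes32,
// matching the L1ViewOracleMetaData entry added in the hunk below.
const l1ViewOracleABIJSON = `[{"inputs":[{"internalType":"uint256","name":"from","type":"uint256"},{"internalType":"uint256","name":"to","type":"uint256"}],"name":"blockRangeHash","outputs":[{"internalType":"bytes32","name":"hash","type":"bytes32"}],"stateMutability":"view","type":"function"}]`

func main() {
	// Placeholder L1 endpoint and contract address.
	client, err := ethclient.Dial("http://localhost:8545")
	if err != nil {
		panic(err)
	}

	parsed, err := abi.JSON(strings.NewReader(l1ViewOracleABIJSON))
	if err != nil {
		panic(err)
	}

	oracleAddr := common.HexToAddress("0x0000000000000000000000000000000000000001")

	// Pack calldata for blockRangeHash(from, to).
	input, err := parsed.Pack("blockRangeHash", big.NewInt(100), big.NewInt(110))
	if err != nil {
		panic(err)
	}

	// Read-only eth_call against the latest block.
	output, err := client.CallContract(context.Background(), ethereum.CallMsg{
		To:   &oracleAddr,
		Data: input,
	}, nil)
	if err != nil {
		panic(err)
	}

	// UnpackIntoInterface requires a pointer to the output value.
	var rangeHash common.Hash
	if err := parsed.UnpackIntoInterface(&rangeHash, "blockRangeHash", output); err != nil {
		panic(err)
	}
	fmt.Println("l1 block range hash:", rangeHash.Hex())
}
```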
@@ -104,6 +109,11 @@ var L1GasPriceOracleMetaData = &bind.MetaData{ ABI: "[{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_owner\",\"type\":\"address\"}],\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"l1BaseFee\",\"type\":\"uint256\"}],\"name\":\"L1BaseFeeUpdated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"overhead\",\"type\":\"uint256\"}],\"name\":\"OverheadUpdated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"_oldOwner\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"_newOwner\",\"type\":\"address\"}],\"name\":\"OwnershipTransferred\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"scalar\",\"type\":\"uint256\"}],\"name\":\"ScalarUpdated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"address\",\"name\":\"_oldWhitelist\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"address\",\"name\":\"_newWhitelist\",\"type\":\"address\"}],\"name\":\"UpdateWhitelist\",\"type\":\"event\"},{\"inputs\":[{\"internalType\":\"bytes\",\"name\":\"_data\",\"type\":\"bytes\"}],\"name\":\"getL1Fee\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes\",\"name\":\"_data\",\"type\":\"bytes\"}],\"name\":\"getL1GasUsed\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"l1BaseFee\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"overhead\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"owner\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"renounceOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"scalar\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"_l1BaseFee\",\"type\":\"uint256\"}],\"name\":\"setL1BaseFee\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"_overhead\",\"type\":\"uint256\"}],\"name\":\"setOverhead\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"_scalar\",\"type\":\"uint256\"}],\"name\":\"setScalar\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_newOwner\",\"type\":\"address\"}],\"name\":\"transferOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_newWhitelist\",\"type\":\"address\"}],\"name\":\"updateWhitelist\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"whitelist\",\"outputs\"
:[{\"internalType\":\"contract IWhitelist\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"}]\n", } +// L1ViewOracleMetaData +var L1ViewOracleMetaData = &bind.MetaData{ + ABI: "[{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"from\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"to\",\"type\":\"uint256\"}],\"name\":\"blockRangeHash\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"hash\",\"type\":\"bytes32\"}],\"stateMutability\":\"view\",\"type\":\"function\"}]", +} + // IL1ScrollMessengerL2MessageProof is an auto generated low-level Go binding around an user-defined struct. type IL1ScrollMessengerL2MessageProof struct { BatchIndex *big.Int diff --git a/rollup/cmd/rollup_relayer/app/app.go b/rollup/cmd/rollup_relayer/app/app.go index d82a1fa5a4..f53e992815 100644 --- a/rollup/cmd/rollup_relayer/app/app.go +++ b/rollup/cmd/rollup_relayer/app/app.go @@ -66,6 +66,13 @@ func action(ctx *cli.Context) error { registry := prometheus.DefaultRegisterer observability.Server(ctx, db) + // Init l1geth connection + l1client, err := ethclient.Dial(cfg.L1Config.Endpoint) + if err != nil { + log.Error("failed to connect l1 geth", "config file", cfgFile, "error", err) + return err + } + // Init l2geth connection l2client, err := ethclient.Dial(cfg.L2Config.Endpoint) if err != nil { @@ -80,7 +87,7 @@ func action(ctx *cli.Context) error { return err } - chunkProposer := watcher.NewChunkProposer(subCtx, l2client, cfg.L2Config.ChunkProposerConfig, db, registry) + chunkProposer, err := watcher.NewChunkProposer(subCtx, l1client, cfg.L2Config.ChunkProposerConfig, cfg.L1Config.L1ViewOracleAddress, db, registry) if err != nil { log.Error("failed to create chunkProposer", "config file", cfgFile, "error", err) return err diff --git a/rollup/conf/config.json b/rollup/conf/config.json index c02abe0068..aac0a65d25 100644 --- a/rollup/conf/config.json +++ b/rollup/conf/config.json @@ -3,6 +3,7 @@ "confirmations": "0x6", "endpoint": "DUMMY_ENDPOINT", "l1_message_queue_address": "0x0000000000000000000000000000000000000000", + "l1_view_oracle_address": "0x0000000000000000000000000000000000000000", "scroll_chain_address": "0x0000000000000000000000000000000000000000", "start_height": 0, "relayer_config": { diff --git a/rollup/internal/config/l1.go b/rollup/internal/config/l1.go index 54b5dae168..d8890c29df 100644 --- a/rollup/internal/config/l1.go +++ b/rollup/internal/config/l1.go @@ -19,4 +19,6 @@ type L1Config struct { ScrollChainContractAddress common.Address `json:"scroll_chain_address"` // The relayer config RelayerConfig *RelayerConfig `json:"relayer_config"` + // The L1ViewOracle contract address deployed on layer 1 chain. 
+ L1ViewOracleAddress common.Address `json:"l1_view_oracle_address"` } diff --git a/rollup/internal/controller/watcher/batch_proposer_test.go b/rollup/internal/controller/watcher/batch_proposer_test.go index 624b486e89..642e1ba3e7 100644 --- a/rollup/internal/controller/watcher/batch_proposer_test.go +++ b/rollup/internal/controller/watcher/batch_proposer_test.go @@ -94,7 +94,7 @@ func testBatchProposerLimits(t *testing.T) { err := l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2}) assert.NoError(t, err) - cp := NewChunkProposer(context.Background(), l2Cli, &config.ChunkProposerConfig{ + cp, err := NewChunkProposer(context.Background(), l1Cli, &config.ChunkProposerConfig{ MaxBlockNumPerChunk: 1, MaxTxNumPerChunk: 10000, MaxL1CommitGasPerChunk: 50000000000, @@ -102,7 +102,9 @@ func testBatchProposerLimits(t *testing.T) { MaxRowConsumptionPerChunk: 1000000, ChunkTimeoutSec: 300, GasCostIncreaseMultiplier: 1.2, - }, db, nil) + }, cfg.L1Config.L1ViewOracleAddress, db, nil) + assert.NoError(t, err) + cp.TryProposeChunk() // chunk1 contains block1 cp.TryProposeChunk() // chunk2 contains block2 @@ -153,7 +155,7 @@ func testBatchCommitGasAndCalldataSizeEstimation(t *testing.T) { err := l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2}) assert.NoError(t, err) - cp := NewChunkProposer(context.Background(), l2Cli, &config.ChunkProposerConfig{ + cp, err := NewChunkProposer(context.Background(), l1Cli, &config.ChunkProposerConfig{ MaxBlockNumPerChunk: 1, MaxTxNumPerChunk: 10000, MaxL1CommitGasPerChunk: 50000000000, @@ -161,7 +163,9 @@ func testBatchCommitGasAndCalldataSizeEstimation(t *testing.T) { MaxRowConsumptionPerChunk: 1000000, ChunkTimeoutSec: 300, GasCostIncreaseMultiplier: 1.2, - }, db, nil) + }, cfg.L1Config.L1ViewOracleAddress, db, nil) + assert.NoError(t, err) + cp.TryProposeChunk() // chunk1 contains block1 cp.TryProposeChunk() // chunk2 contains block2 diff --git a/rollup/internal/controller/watcher/chunk_proposer.go b/rollup/internal/controller/watcher/chunk_proposer.go index aed628f11e..a48a0334a8 100644 --- a/rollup/internal/controller/watcher/chunk_proposer.go +++ b/rollup/internal/controller/watcher/chunk_proposer.go @@ -9,12 +9,16 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" + "github.com/scroll-tech/go-ethereum" + "github.com/scroll-tech/go-ethereum/accounts/abi" + "github.com/scroll-tech/go-ethereum/common" gethTypes "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/ethclient" "github.com/scroll-tech/go-ethereum/log" "gorm.io/gorm" "scroll-tech/common/types" + bridgeAbi "scroll-tech/rollup/abi" "scroll-tech/rollup/internal/config" "scroll-tech/rollup/internal/orm" @@ -51,9 +55,11 @@ type ChunkProposer struct { db *gorm.DB *ethclient.Client + l1ViewOracleAddress common.Address - chunkOrm *orm.Chunk - l2BlockOrm *orm.L2Block + chunkOrm *orm.Chunk + l2BlockOrm *orm.L2Block + l1ViewOracleABI *abi.ABI maxBlockNumPerChunk uint64 maxTxNumPerChunk uint64 @@ -78,18 +84,25 @@ type ChunkProposer struct { } // NewChunkProposer creates a new ChunkProposer instance. 
-func NewChunkProposer(ctx context.Context, client *ethclient.Client, cfg *config.ChunkProposerConfig, db *gorm.DB, reg prometheus.Registerer) *ChunkProposer { +func NewChunkProposer(ctx context.Context, client *ethclient.Client, cfg *config.ChunkProposerConfig, l1ViewOracleAddress common.Address, db *gorm.DB, reg prometheus.Registerer) (*ChunkProposer, error) { + if l1ViewOracleAddress == (common.Address{}) { + return nil, errors.New("must pass non-zero l1ViewOracleAddress to BridgeClient") + } + log.Debug("new chunk proposer", "maxTxNumPerChunk", cfg.MaxTxNumPerChunk, "maxL1CommitGasPerChunk", cfg.MaxL1CommitGasPerChunk, "maxL1CommitCalldataSizePerChunk", cfg.MaxL1CommitCalldataSizePerChunk, "maxRowConsumptionPerChunk", cfg.MaxRowConsumptionPerChunk, "chunkTimeoutSec", cfg.ChunkTimeoutSec, - "gasCostIncreaseMultiplier", cfg.GasCostIncreaseMultiplier) + "gasCostIncreaseMultiplier", cfg.GasCostIncreaseMultiplier, + ) return &ChunkProposer{ ctx: ctx, Client: client, + l1ViewOracleAddress: l1ViewOracleAddress, + l1ViewOracleABI: bridgeAbi.L1ViewOracleABI, db: db, chunkOrm: orm.NewChunk(db), l2BlockOrm: orm.NewL2Block(db), @@ -149,7 +162,7 @@ func NewChunkProposer(ctx context.Context, client *ethclient.Client, cfg *config Name: "rollup_propose_chunk_blocks_propose_not_enough_total", Help: "Total number of chunk block propose not enough", }), - } + }, nil } // TryProposeChunk tries to propose a new chunk. @@ -227,10 +240,10 @@ func (p *ChunkProposer) proposeChunk(parentChunk *orm.Chunk) (*types.Chunk, erro } } - l1BlockRangeHash, err := p.Client.GetL1BlockRangeHash(p.ctx, big.NewInt(int64(l1BlockRangeHashFrom)), big.NewInt(int64(lastAppliedL1Block))) + l1BlockRangeHash, err := p.GetL1BlockRangeHash(p.ctx, l1BlockRangeHashFrom, lastAppliedL1Block) if err != nil { - log.Error("failed to get block range hash", "err", err) - return nil, fmt.Errorf("chunk-proposer failed to get block range hash error: %w", err) + log.Error("failed to get l1 block range hash", "err", err) + return nil, fmt.Errorf("chunk-proposer failed to get l1 block range hash error: %w", err) } chunk.LastAppliedL1Block = lastAppliedL1Block @@ -351,3 +364,37 @@ func (p *ChunkProposer) proposeChunk(parentChunk *orm.Chunk) (*types.Chunk, erro p.chunkBlocksProposeNotEnoughTotal.Inc() return nil, nil } + +// GetL1BlockRangeHash gets l1 block range hash from l1 view oracle smart contract. 
+func (p *ChunkProposer) GetL1BlockRangeHash(ctx context.Context, from uint64, to uint64) (*common.Hash, error) { + input, err := p.l1ViewOracleABI.Pack("blockRangeHash", big.NewInt(int64(from)), big.NewInt(int64(to))) + if err != nil { + return nil, err + } + + output, err := p.Client.CallContract(ctx, ethereum.CallMsg{ + To: &p.l1ViewOracleAddress, + Data: input, + }, nil) + if err != nil { + return nil, err + } + if len(output) == 0 { + if code, err := p.Client.CodeAt(ctx, p.l1ViewOracleAddress, nil); err != nil { + return nil, err + } else if len(code) == 0 { + return nil, fmt.Errorf( + "l1 view oracle contract unknown, address: %v", + p.l1ViewOracleAddress, + ) + } + } + + var l1BlockRangeHash common.Hash + err = p.l1ViewOracleABI.UnpackIntoInterface(l1BlockRangeHash, "blockRangeHash", output) + if err != nil { + return nil, err + } + + return &l1BlockRangeHash, nil +} diff --git a/rollup/internal/controller/watcher/chunk_proposer_test.go b/rollup/internal/controller/watcher/chunk_proposer_test.go index cb7e3a2f00..43b620f8b0 100644 --- a/rollup/internal/controller/watcher/chunk_proposer_test.go +++ b/rollup/internal/controller/watcher/chunk_proposer_test.go @@ -152,7 +152,7 @@ func testChunkProposerLimits(t *testing.T) { err := l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2}) assert.NoError(t, err) - cp := NewChunkProposer(context.Background(), l2Cli, &config.ChunkProposerConfig{ + cp, err := NewChunkProposer(context.Background(), l1Cli, &config.ChunkProposerConfig{ MaxBlockNumPerChunk: tt.maxBlockNum, MaxTxNumPerChunk: tt.maxTxNum, MaxL1CommitGasPerChunk: tt.maxL1CommitGas, @@ -160,7 +160,9 @@ func testChunkProposerLimits(t *testing.T) { MaxRowConsumptionPerChunk: tt.maxRowConsumption, ChunkTimeoutSec: tt.chunkTimeoutSec, GasCostIncreaseMultiplier: 1.2, - }, db, nil) + }, cfg.L1Config.L1ViewOracleAddress, db, nil) + assert.NoError(t, err) + cp.TryProposeChunk() chunkOrm := orm.NewChunk(db) diff --git a/rollup/internal/controller/watcher/watcher_test.go b/rollup/internal/controller/watcher/watcher_test.go index 0bae139de3..e9edabf1c0 100644 --- a/rollup/internal/controller/watcher/watcher_test.go +++ b/rollup/internal/controller/watcher/watcher_test.go @@ -24,6 +24,9 @@ var ( base *docker.App + // l1geth client + l1Cli *ethclient.Client + // l2geth client l2Cli *ethclient.Client @@ -47,6 +50,9 @@ func setupEnv(t *testing.T) (err error) { MaxOpenNum: base.DBConfig.MaxOpenNum, MaxIdleNum: base.DBConfig.MaxIdleNum, } + // Create l1geth client. + l1Cli, err = base.L1Client() + assert.NoError(t, err) // Create l2geth client. 
l2Cli, err = base.L2Client() diff --git a/rollup/tests/rollup_test.go b/rollup/tests/rollup_test.go index 8501686ba0..31f8f17acf 100644 --- a/rollup/tests/rollup_test.go +++ b/rollup/tests/rollup_test.go @@ -56,14 +56,16 @@ func testCommitBatchAndFinalizeBatch(t *testing.T) { err = l2BlockOrm.InsertL2Blocks(context.Background(), wrappedBlocks) assert.NoError(t, err) - cp := watcher.NewChunkProposer(context.Background(), l2Client, &config.ChunkProposerConfig{ + cp, err := watcher.NewChunkProposer(context.Background(), l1Client, &config.ChunkProposerConfig{ MaxBlockNumPerChunk: 100, MaxTxNumPerChunk: 10000, MaxL1CommitGasPerChunk: 50000000000, MaxL1CommitCalldataSizePerChunk: 1000000, MaxRowConsumptionPerChunk: 1048319, ChunkTimeoutSec: 300, - }, db, nil) + }, l1Cfg.L1ViewOracleAddress, db, nil) + assert.NoError(t, err) + cp.TryProposeChunk() batchOrm := orm.NewBatch(db) From 49ecf5367fa72cc75c5e695991cf8a0fc5065b3d Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Mon, 20 Nov 2023 16:36:54 +0200 Subject: [PATCH 15/59] fix: fix issues after rebase --- prover/core/prover.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/prover/core/prover.go b/prover/core/prover.go index ebccb4eddd..fcfb728cd0 100644 --- a/prover/core/prover.go +++ b/prover/core/prover.go @@ -208,10 +208,13 @@ func (p *ProverCore) proveBatch(chunkInfosByt []byte, chunkProofsByt []byte) ([] return result.Message, nil } -func (p *ProverCore) proveChunk(chunkTraceByt []byte) ([]byte, error) { +func (p *ProverCore) proveChunk(chunkTraceByt []byte, prevLastAppliedL1Block uint64, l1BlockRangeHash []byte) ([]byte, error) { chunkTraceBytStr := C.CString(string(chunkTraceByt)) defer C.free(unsafe.Pointer(chunkTraceBytStr)) + l1BlockRangeHashStr := C.CString(string(l1BlockRangeHash)) + defer C.free(unsafe.Pointer(l1BlockRangeHashStr)) + log.Info("Start to create chunk proof ...") cProof := C.gen_chunk_proof(tracesStr, C.uint64_t(prevLastAppliedL1Block), l1BlockRangeHashStr) defer C.free_c_chars(cProof) From 9037ce9365ab64a295d1ed6456410b7f7fdea556 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Tue, 21 Nov 2023 15:38:30 +0200 Subject: [PATCH 16/59] wip: add last applied l1 block to chunk data --- common/types/message/message.go | 2 ++ .../internal/logic/provertask/chunk_prover_task.go | 3 ++- prover/core/prover.go | 9 +++++++-- prover/prover.go | 1 + 4 files changed, 12 insertions(+), 3 deletions(-) diff --git a/common/types/message/message.go b/common/types/message/message.go index 44f1bb7be1..4354fc5754 100644 --- a/common/types/message/message.go +++ b/common/types/message/message.go @@ -223,6 +223,7 @@ type TaskMsg struct { type ChunkTaskDetail struct { BlockHashes []common.Hash `json:"block_hashes"` PrevLastAppliedL1Block uint64 `json:"prev_last_applied_l1_block"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block"` L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` } @@ -280,6 +281,7 @@ type ChunkProof struct { type ChunkTrace struct { BlockTraces []*types.BlockTrace `json:"block_traces"` PrevLastAppliedL1Block uint64 `json:"prev_last_applied_l1_block"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block"` L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` } diff --git a/coordinator/internal/logic/provertask/chunk_prover_task.go b/coordinator/internal/logic/provertask/chunk_prover_task.go index 8314881b46..cf94e46152 100644 --- a/coordinator/internal/logic/provertask/chunk_prover_task.go +++ b/coordinator/internal/logic/provertask/chunk_prover_task.go @@ -167,7 +167,8 @@ 
func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.Prove } return 0 }(), - L1BlockRangeHash: common.HexToHash(chunk.L1BlockRangeHash), + LastAppliedL1Block: chunk.LastAppliedL1Block, + L1BlockRangeHash: common.HexToHash(chunk.L1BlockRangeHash), } taskDataBytes, err := json.Marshal(taskDetail) if err != nil { diff --git a/prover/core/prover.go b/prover/core/prover.go index fcfb728cd0..0988ccfd87 100644 --- a/prover/core/prover.go +++ b/prover/core/prover.go @@ -208,7 +208,7 @@ func (p *ProverCore) proveBatch(chunkInfosByt []byte, chunkProofsByt []byte) ([] return result.Message, nil } -func (p *ProverCore) proveChunk(chunkTraceByt []byte, prevLastAppliedL1Block uint64, l1BlockRangeHash []byte) ([]byte, error) { +func (p *ProverCore) proveChunk(chunkTraceByt []byte, prevLastAppliedL1Block uint64, lastAppliedL1Block uint64, l1BlockRangeHash []byte) ([]byte, error) { chunkTraceBytStr := C.CString(string(chunkTraceByt)) defer C.free(unsafe.Pointer(chunkTraceBytStr)) @@ -216,7 +216,12 @@ func (p *ProverCore) proveChunk(chunkTraceByt []byte, prevLastAppliedL1Block uin defer C.free(unsafe.Pointer(l1BlockRangeHashStr)) log.Info("Start to create chunk proof ...") - cProof := C.gen_chunk_proof(tracesStr, C.uint64_t(prevLastAppliedL1Block), l1BlockRangeHashStr) + cProof := C.gen_chunk_proof( + tracesStr, + C.uint64_t(prevLastAppliedL1Block), + C.uint64_t(lastAppliedL1Block), + l1BlockRangeHashStr, + ) defer C.free_c_chars(cProof) log.Info("Finish creating chunk proof!") diff --git a/prover/prover.go b/prover/prover.go index aa6ca6619f..fcd0bc65e1 100644 --- a/prover/prover.go +++ b/prover/prover.go @@ -291,6 +291,7 @@ func (r *Prover) proveChunk(task *store.ProvingTask) (*message.ChunkProof, error chunkTrace := message.ChunkTrace{ BlockTraces: traces, PrevLastAppliedL1Block: task.Task.ChunkTaskDetail.PrevLastAppliedL1Block, + LastAppliedL1Block: task.Task.ChunkTaskDetail.LastAppliedL1Block, L1BlockRangeHash: task.Task.ChunkTaskDetail.L1BlockRangeHash, } return r.proverCore.ProveChunk( From 0649057bc3f7ff3af68ee89595178058f9c8b708 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Wed, 22 Nov 2023 16:38:38 +0200 Subject: [PATCH 17/59] wip: add update chunk info and batch header --- common/types/batch_header.go | 16 ++++-- common/types/chunk.go | 2 +- common/types/message/message.go | 15 +++--- .../libraries/codec/BatchHeaderV0Codec.sol | 50 ++++++++++++++----- .../logic/provertask/batch_prover_task.go | 14 +++--- ...17_add_l1_block_hashes_fields_to_batch.sql | 18 +++++++ .../internal/controller/relayer/l2_relayer.go | 6 ++- rollup/internal/orm/batch.go | 13 +++++ 8 files changed, 104 insertions(+), 30 deletions(-) create mode 100644 database/migrate/migrations/00017_add_l1_block_hashes_fields_to_batch.sql diff --git a/common/types/batch_header.go b/common/types/batch_header.go index 85ebfcebd8..14e12ce9ed 100644 --- a/common/types/batch_header.go +++ b/common/types/batch_header.go @@ -30,6 +30,8 @@ type BatchHeader struct { dataHash common.Hash parentBatchHash common.Hash skippedL1MessageBitmap []byte + lastAppliedL1Block uint64 + l1BlockRangeHash common.Hash } // NewBatchHeader creates a new BatchHeader @@ -109,6 +111,9 @@ func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64 dataHash: dataHash, parentBatchHash: parentBatchHash, skippedL1MessageBitmap: bitmapBytes, + // TODO: + lastAppliedL1Block: 0, + l1BlockRangeHash: common.Hash{}, }, nil } @@ -134,14 +139,17 @@ func (b *BatchHeader) SkippedL1MessageBitmap() []byte { // Encode encodes the 
BatchHeader into RollupV2 BatchHeaderV0Codec Encoding. func (b *BatchHeader) Encode() []byte { - batchBytes := make([]byte, 89+len(b.skippedL1MessageBitmap)) + batchBytes := make([]byte, 129+len(b.skippedL1MessageBitmap)) batchBytes[0] = b.version binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) copy(batchBytes[25:], b.dataHash[:]) copy(batchBytes[57:], b.parentBatchHash[:]) + copy(batchBytes[57:], b.parentBatchHash[:]) copy(batchBytes[89:], b.skippedL1MessageBitmap[:]) + binary.BigEndian.PutUint64(batchBytes[89+len(b.skippedL1MessageBitmap):], b.lastAppliedL1Block) + copy(batchBytes[97+len(b.skippedL1MessageBitmap):], b.l1BlockRangeHash[:]) return batchBytes } @@ -152,7 +160,7 @@ func (b *BatchHeader) Hash() common.Hash { // DecodeBatchHeader attempts to decode the given byte slice into a BatchHeader. func DecodeBatchHeader(data []byte) (*BatchHeader, error) { - if len(data) < 89 { + if len(data) < 97 { return nil, fmt.Errorf("insufficient data for BatchHeader") } b := &BatchHeader{ @@ -162,7 +170,9 @@ func DecodeBatchHeader(data []byte) (*BatchHeader, error) { totalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), dataHash: common.BytesToHash(data[25:57]), parentBatchHash: common.BytesToHash(data[57:89]), - skippedL1MessageBitmap: data[89:], + skippedL1MessageBitmap: data[89 : len(data)-40], + lastAppliedL1Block: binary.BigEndian.Uint64(data[len(data)-40 : len(data)-32]), + l1BlockRangeHash: common.BytesToHash(data[len(data)-32:]), } return b, nil } diff --git a/common/types/chunk.go b/common/types/chunk.go index dcff23e184..415d793939 100644 --- a/common/types/chunk.go +++ b/common/types/chunk.go @@ -79,7 +79,7 @@ func (c *Chunk) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) { chunkBytes = append(chunkBytes, l2TxDataBytes...) - binary.BigEndian.AppendUint64(chunkBytes, c.LastAppliedL1Block) + binary.BigEndian.PutUint64(chunkBytes, c.LastAppliedL1Block) chunkBytes = append(chunkBytes, c.L1BlockRangeHash.Bytes()...) return chunkBytes, nil diff --git a/common/types/message/message.go b/common/types/message/message.go index 4354fc5754..56fa2ff3fb 100644 --- a/common/types/message/message.go +++ b/common/types/message/message.go @@ -257,13 +257,14 @@ func (z *ProofDetail) Hash() ([]byte, error) { // ChunkInfo is for calculating pi_hash for chunk type ChunkInfo struct { - ChainID uint64 `json:"chain_id"` - PrevStateRoot common.Hash `json:"prev_state_root"` - PostStateRoot common.Hash `json:"post_state_root"` - L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` - WithdrawRoot common.Hash `json:"withdraw_root"` - DataHash common.Hash `json:"data_hash"` - IsPadding bool `json:"is_padding"` + ChainID uint64 `json:"chain_id"` + PrevStateRoot common.Hash `json:"prev_state_root"` + PostStateRoot common.Hash `json:"post_state_root"` + WithdrawRoot common.Hash `json:"withdraw_root"` + DataHash common.Hash `json:"data_hash"` + L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block"` + IsPadding bool `json:"is_padding"` } // ChunkProof includes the proof info that are required for chunk verification and rollup. 
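The batch-header change above extends the encoding with two trailing fields: an 8-byte `lastAppliedL1Block` written at offset `89+len(skippedL1MessageBitmap)` and a 32-byte `l1BlockRangeHash` at offset `97+len(skippedL1MessageBitmap)`, which `DecodeBatchHeader` recovers from the last 40 bytes of the header. The Solidity codec in the next diff mirrors the same layout. Below is a minimal round-trip sketch of just these trailing fields; the helper names are illustrative and not part of this patch.

```go
package main

import (
	"encoding/binary"
	"fmt"

	"github.com/scroll-tech/go-ethereum/common"
)

// encodeTrailer appends the two new trailing fields after an already-encoded
// prefix (the 89 fixed bytes plus the skipped-L1-message bitmap): 8 bytes of
// lastAppliedL1Block followed by the 32-byte l1BlockRangeHash.
func encodeTrailer(prefix []byte, lastAppliedL1Block uint64, l1BlockRangeHash common.Hash) []byte {
	out := make([]byte, len(prefix)+40)
	copy(out, prefix)
	binary.BigEndian.PutUint64(out[len(prefix):], lastAppliedL1Block)
	copy(out[len(prefix)+8:], l1BlockRangeHash[:])
	return out
}

// decodeTrailer mirrors DecodeBatchHeader: the last 40 bytes split into the
// last applied L1 block number and the L1 block range hash.
func decodeTrailer(data []byte) (uint64, common.Hash) {
	n := len(data)
	return binary.BigEndian.Uint64(data[n-40 : n-32]), common.BytesToHash(data[n-32:])
}

func main() {
	// 89-byte fixed prefix with an empty bitmap, zeroed for illustration.
	prefix := make([]byte, 89)
	encoded := encodeTrailer(prefix, 12345, common.HexToHash("0xabc"))
	lastBlock, rangeHash := decodeTrailer(encoded)
	fmt.Println(len(encoded), lastBlock, rangeHash.Hex()) // 129 12345 0x0000...0abc
}
```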
diff --git a/contracts/src/libraries/codec/BatchHeaderV0Codec.sol b/contracts/src/libraries/codec/BatchHeaderV0Codec.sol index 93004b40ce..d27824b9ba 100644 --- a/contracts/src/libraries/codec/BatchHeaderV0Codec.sol +++ b/contracts/src/libraries/codec/BatchHeaderV0Codec.sol @@ -6,14 +6,16 @@ pragma solidity ^0.8.16; /// @dev Below is the encoding for `BatchHeader` V0, total 89 + ceil(l1MessagePopped / 256) * 32 bytes. /// ```text -/// * Field Bytes Type Index Comments -/// * version 1 uint8 0 The batch version -/// * batchIndex 8 uint64 1 The index of the batch -/// * l1MessagePopped 8 uint64 9 Number of L1 messages popped in the batch -/// * totalL1MessagePopped 8 uint64 17 Number of total L1 message popped after the batch -/// * dataHash 32 bytes32 25 The data hash of the batch -/// * parentBatchHash 32 bytes32 57 The parent batch hash -/// * skippedL1MessageBitmap dynamic uint256[] 89 A bitmap to indicate which L1 messages are skipped in the batch +/// * Field Bytes Type Index Comments +/// * version 1 uint8 0 The batch version +/// * batchIndex 8 uint64 1 The index of the batch +/// * l1MessagePopped 8 uint64 9 Number of L1 messages popped in the batch +/// * totalL1MessagePopped 8 uint64 17 Number of total L1 message popped after the batch +/// * dataHash 32 bytes32 25 The data hash of the batch +/// * parentBatchHash 32 bytes32 57 The parent batch hash +/// * skippedL1MessageBitmap dynamic uint256[] 89 A bitmap to indicate which L1 messages are skipped in the batch +/// * lastAppliedL1Block 8 uint64 89 + ceil(skippedL1MessageBitmap / 256) * 32 The last applied L1 block number +/// * blockRangeHash 32 bytes32 97 + ceil(skippedL1MessageBitmap / 256) * 32 The batch l1 block range hash /// ``` library BatchHeaderV0Codec { /// @notice Load batch header in calldata to memory. @@ -22,7 +24,7 @@ library BatchHeaderV0Codec { /// @return length The length in bytes of the batch header. function loadAndValidate(bytes calldata _batchHeader) internal pure returns (uint256 batchPtr, uint256 length) { length = _batchHeader.length; - require(length >= 89, "batch header length too small"); + require(length >= 97, "batch header length too small"); // copy batch header to memory. assembly { @@ -35,7 +37,7 @@ library BatchHeaderV0Codec { uint256 _l1MessagePopped = BatchHeaderV0Codec.l1MessagePopped(batchPtr); unchecked { - require(length == 89 + ((_l1MessagePopped + 255) / 256) * 32, "wrong bitmap length"); + require(length == 97 + ((_l1MessagePopped + 255) / 256) * 32, "wrong bitmap length"); } } @@ -167,9 +169,33 @@ library BatchHeaderV0Codec { /// @notice Store the skipped L1 message bitmap of batch header. /// @param batchPtr The start memory offset of the batch header in memory. /// @param _skippedL1MessageBitmap The skipped L1 message bitmap. - function storeSkippedBitmap(uint256 batchPtr, bytes calldata _skippedL1MessageBitmap) internal pure { + function storeSkippedBitmap(uint256 batchPtr, bytes calldata _skippedL1MessageBitmap) + internal + pure + returns (uint256 _offset) + { assembly { - calldatacopy(add(batchPtr, 89), _skippedL1MessageBitmap.offset, _skippedL1MessageBitmap.length) + _offset := add(batchPtr, 89) + calldatacopy(_offset, _skippedL1MessageBitmap.offset, _skippedL1MessageBitmap.length) + _offset := add(_offset, _skippedL1MessageBitmap.length) + } + } + + /// @notice Store the last applied L1 block number. + /// @param batchOffset The start memory offset of the batch header + dynamic offset. + /// @param _lastAppliedL1Block The last applied L1 block number. 
+ function storeLastAppliedL1Block(uint256 batchOffset, uint256 _lastAppliedL1Block) internal pure { + assembly { + mstore(add(batchOffset, 89), shl(224, _lastAppliedL1Block)) + } + } + + /// @notice Store the block range hash of batch header. + /// @param batchOffset The start memory offset of the batch header + dynamic offset. + /// @param _blockRangeHash The block range hash. + function storeBlockRangeHash(uint256 batchOffset, bytes32 _blockRangeHash) internal pure { + assembly { + mstore(add(batchOffset, 97), _blockRangeHash) } } diff --git a/coordinator/internal/logic/provertask/batch_prover_task.go b/coordinator/internal/logic/provertask/batch_prover_task.go index b043cd29d3..a22f5cdf9f 100644 --- a/coordinator/internal/logic/provertask/batch_prover_task.go +++ b/coordinator/internal/logic/provertask/batch_prover_task.go @@ -158,12 +158,14 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove chunkProofs = append(chunkProofs, &proof) chunkInfo := message.ChunkInfo{ - ChainID: bp.cfg.L2.ChainID, - PrevStateRoot: common.HexToHash(chunk.ParentChunkStateRoot), - PostStateRoot: common.HexToHash(chunk.StateRoot), - WithdrawRoot: common.HexToHash(chunk.WithdrawRoot), - DataHash: common.HexToHash(chunk.Hash), - IsPadding: false, + ChainID: bp.cfg.L2.ChainID, + PrevStateRoot: common.HexToHash(chunk.ParentChunkStateRoot), + PostStateRoot: common.HexToHash(chunk.StateRoot), + WithdrawRoot: common.HexToHash(chunk.WithdrawRoot), + DataHash: common.HexToHash(chunk.Hash), + L1BlockRangeHash: common.HexToHash(chunk.L1BlockRangeHash), + LastAppliedL1Block: chunk.LastAppliedL1Block, + IsPadding: false, } chunkInfos = append(chunkInfos, &chunkInfo) } diff --git a/database/migrate/migrations/00017_add_l1_block_hashes_fields_to_batch.sql b/database/migrate/migrations/00017_add_l1_block_hashes_fields_to_batch.sql new file mode 100644 index 0000000000..d346ffbfc3 --- /dev/null +++ b/database/migrate/migrations/00017_add_l1_block_hashes_fields_to_batch.sql @@ -0,0 +1,18 @@ + +-- +goose Up +-- +goose StatementBegin + +ALTER TABLE batch + ADD COLUMN last_applied_l1_block BIGINT NOT NULL DEFAULT 0, + ADD COLUMN l1_block_range_hash VARCHAR DEFAULT NULL; + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin + +ALTER TABLE IF EXISTS batch +DROP COLUMN last_applied_l1_block, +DROP COLUMN l1_block_range_hash; + +-- +goose StatementEnd diff --git a/rollup/internal/controller/relayer/l2_relayer.go b/rollup/internal/controller/relayer/l2_relayer.go index 42c4058e71..35d8bd408c 100644 --- a/rollup/internal/controller/relayer/l2_relayer.go +++ b/rollup/internal/controller/relayer/l2_relayer.go @@ -171,6 +171,8 @@ func (r *Layer2Relayer) initializeGenesis() error { RowConsumption: &gethTypes.RowConsumption{}, LastAppliedL1Block: 0, }}, + LastAppliedL1Block: 0, + L1BlockRangeHash: common.Hash{}, } err = r.db.Transaction(func(dbTX *gorm.DB) error { @@ -364,7 +366,9 @@ func (r *Layer2Relayer) ProcessPendingBatches() { return } chunk := &types.Chunk{ - Blocks: wrappedBlocks, + Blocks: wrappedBlocks, + LastAppliedL1Block: c.LastAppliedL1Block, + L1BlockRangeHash: common.HexToHash(c.L1BlockRangeHash), } var chunkBytes []byte chunkBytes, err = chunk.Encode(c.TotalL1MessagesPoppedBefore) diff --git a/rollup/internal/orm/batch.go b/rollup/internal/orm/batch.go index ccd5873b8e..f4d81fd922 100644 --- a/rollup/internal/orm/batch.go +++ b/rollup/internal/orm/batch.go @@ -13,6 +13,7 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/log" + 
"golang.org/x/crypto/sha3" "gorm.io/gorm" ) @@ -34,6 +35,9 @@ type Batch struct { ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"` BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block"` + L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` + // proof ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"` ProvingStatus int16 `json:"proving_status" gorm:"column:proving_status;default:1"` @@ -269,6 +273,13 @@ func (o *Batch) InsertBatch(ctx context.Context, chunks []*types.Chunk, batchMet return nil, err } + hasher := sha3.NewLegacyKeccak256() + var l1BlockRangeHash common.Hash + for _, chunk := range chunks { + hasher.Write(chunk.L1BlockRangeHash.Bytes()) + } + copy(l1BlockRangeHash[:], hasher.Sum(nil)) + numChunks := len(chunks) lastChunkBlockNum := len(chunks[numChunks-1].Blocks) @@ -289,6 +300,8 @@ func (o *Batch) InsertBatch(ctx context.Context, chunks []*types.Chunk, batchMet OracleStatus: int16(types.GasOraclePending), TotalL1CommitGas: batchMeta.TotalL1CommitGas, TotalL1CommitCalldataSize: batchMeta.TotalL1CommitCalldataSize, + LastAppliedL1Block: chunks[numChunks-1].LastAppliedL1Block, + L1BlockRangeHash: l1BlockRangeHash, } db := o.db From b484a71e94c53d35d3ed21204a3ba7ca39c6d41f Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Thu, 23 Nov 2023 17:30:29 +0200 Subject: [PATCH 18/59] wip: add update chunk/batch prove/verify --- common/types/batch_header.go | 20 +++++-- contracts/src/L1/L1ViewOracle.sol | 26 ++++----- contracts/src/L1/rollup/ScrollChain.sol | 54 ++++++++++++++++--- .../libraries/codec/BatchHeaderV0Codec.sol | 12 ++--- contracts/src/libraries/codec/ChunkCodec.sol | 24 +++++++-- contracts/src/test/L1ViewOracle.t.sol | 22 ++++---- rollup/internal/orm/batch.go | 10 +--- 7 files changed, 116 insertions(+), 52 deletions(-) diff --git a/common/types/batch_header.go b/common/types/batch_header.go index 14e12ce9ed..dc5a895c1d 100644 --- a/common/types/batch_header.go +++ b/common/types/batch_header.go @@ -38,6 +38,7 @@ type BatchHeader struct { func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64, parentBatchHash common.Hash, chunks []*Chunk) (*BatchHeader, error) { // buffer for storing chunk hashes in order to compute the batch data hash var dataBytes []byte + var l1BlockRangeHashBytes []byte // skipped L1 message bitmap, an array of 256-bit bitmaps var skippedBitmap []*big.Int @@ -56,6 +57,7 @@ func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64 return nil, err } dataBytes = append(dataBytes, chunkHash.Bytes()...) + l1BlockRangeHashBytes = append(l1BlockRangeHashBytes, chunk.L1BlockRangeHash.Bytes()...) 
// build skip bitmap for blockID, block := range chunk.Blocks { @@ -95,6 +97,9 @@ func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64 // compute data hash dataHash := crypto.Keccak256Hash(dataBytes) + // compute l1 block range hash + l1BlockRangeHash := crypto.Keccak256Hash(l1BlockRangeHashBytes) + // compute skipped bitmap bitmapBytes := make([]byte, len(skippedBitmap)*32) for ii, num := range skippedBitmap { @@ -111,9 +116,8 @@ func NewBatchHeader(version uint8, batchIndex, totalL1MessagePoppedBefore uint64 dataHash: dataHash, parentBatchHash: parentBatchHash, skippedL1MessageBitmap: bitmapBytes, - // TODO: - lastAppliedL1Block: 0, - l1BlockRangeHash: common.Hash{}, + lastAppliedL1Block: chunks[len(chunks)-1].LastAppliedL1Block, + l1BlockRangeHash: l1BlockRangeHash, }, nil } @@ -137,6 +141,16 @@ func (b *BatchHeader) SkippedL1MessageBitmap() []byte { return b.skippedL1MessageBitmap } +// LastAppliedL1Block returns the last applied L1 block in the BatchHeader. +func (b *BatchHeader) LastAppliedL1Block() uint64 { + return b.lastAppliedL1Block +} + +// L1BlockRangeHash returns the batch L1 block range hash in the BatchHeader. +func (b *BatchHeader) L1BlockRangeHash() common.Hash { + return b.l1BlockRangeHash +} + // Encode encodes the BatchHeader into RollupV2 BatchHeaderV0Codec Encoding. func (b *BatchHeader) Encode() []byte { batchBytes := make([]byte, 129+len(b.skippedL1MessageBitmap)) diff --git a/contracts/src/L1/L1ViewOracle.sol b/contracts/src/L1/L1ViewOracle.sol index 6bdcf280f9..abaae0e767 100644 --- a/contracts/src/L1/L1ViewOracle.sol +++ b/contracts/src/L1/L1ViewOracle.sol @@ -6,23 +6,25 @@ import {IL1ViewOracle} from "./IL1ViewOracle.sol"; contract L1ViewOracle is IL1ViewOracle { /** - * @dev Returns hash of all the blockhashes in the range - * @param from The block number to get the hash of blockhashes after. - * @param to The block number to get the hash of blockhashes up to. - * @return hash The keccak hash of all blockhashes in the provided range + * @dev Returns hash of all the blockhashes in the range. + * @param _from The block number to get the hash of blockhashes after. + * @param _to The block number to get the hash of blockhashes up to. + * @return hash_ The keccak hash of all blockhashes in the provided range. 
*/ - function blockRangeHash(uint256 from, uint256 to) external view returns (bytes32 hash) { - require(to >= from, "End must be greater than or equal to start"); - require(to < block.number, "Block range exceeds current block"); + function blockRangeHash(uint256 _from, uint256 _to) external view returns (bytes32 hash_) { + require(_from > 0, "Incorrect from/to range"); + require(_to >= _from, "Incorrect from/to range"); + require(_to < block.number, "Incorrect from/to range"); - hash = 0; + bytes32[] memory blockHashes = new bytes32[](_to - _from + 1); + uint256 cnt = 0; - for (uint256 i = from; i <= to; i++) { + for (uint256 i = _from; i <= _to; i++) { bytes32 blockHash = blockhash(i); - require(blockHash != 0, "Blockhash not available"); - - hash = keccak256(abi.encodePacked(hash, blockHash)); + blockHashes[cnt++] = blockHash; } + + hash_ = keccak256(abi.encodePacked(blockHashes)); } } diff --git a/contracts/src/L1/rollup/ScrollChain.sol b/contracts/src/L1/rollup/ScrollChain.sol index dd6c2db50b..d9e885958b 100644 --- a/contracts/src/L1/rollup/ScrollChain.sol +++ b/contracts/src/L1/rollup/ScrollChain.sol @@ -202,10 +202,14 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { mstore(0x40, add(dataPtr, mul(_chunksLength, 32))) } + uint256 _lastAppliedL1Block; + uint256 _totalNumL1MessagesInChunk; + uint256 _lastAppliedL1BlockInChunk; + bytes32 _l1BlockRangeHashInChunk; // compute the data hash for each chunk uint256 _totalL1MessagesPoppedInBatch; for (uint256 i = 0; i < _chunksLength; i++) { - uint256 _totalNumL1MessagesInChunk = _commitChunk( + (_totalNumL1MessagesInChunk, _lastAppliedL1BlockInChunk, _l1BlockRangeHashInChunk) = _commitChunk( dataPtr, _chunks[i], _totalL1MessagesPoppedInBatch, @@ -213,6 +217,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { _skippedL1MessageBitmap ); + // if it is the last chunk, update the last applied L1 block + if (i == _chunksLength - 1) { + _lastAppliedL1Block = _lastAppliedL1BlockInChunk; + } + unchecked { _totalL1MessagesPoppedInBatch += _totalNumL1MessagesInChunk; _totalL1MessagesPoppedOverall += _totalNumL1MessagesInChunk; @@ -245,10 +254,13 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { BatchHeaderV0Codec.storeTotalL1MessagePopped(batchPtr, _totalL1MessagesPoppedOverall); BatchHeaderV0Codec.storeDataHash(batchPtr, _dataHash); BatchHeaderV0Codec.storeParentBatchHash(batchPtr, _parentBatchHash); - BatchHeaderV0Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap); + uint256 batchOffset = BatchHeaderV0Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap); + BatchHeaderV0Codec.storeLastAppliedL1Block(batchOffset, _lastAppliedL1Block); + // TODO: store l1BlockRangeHash + // BatchHeaderV0Codec.storeL1BlockRangeHash(batchOffset, _l1BlockRangeHashInBatch); // compute batch hash - bytes32 _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, 89 + _skippedL1MessageBitmap.length); + bytes32 _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, 129 + _skippedL1MessageBitmap.length); committedBatches[_batchIndex] = _batchHash; emit CommitBatch(_batchIndex, _batchHash); @@ -310,6 +322,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { // avoid duplicated verification require(finalizedStateRoots[_batchIndex] == bytes32(0), "batch already verified"); + // TODO: add lastAppliedL1Block and l1BlockRangeHash // compute public input hash bytes32 _publicInputHash = keccak256( abi.encodePacked(layer2ChainId, 
_prevStateRoot, _postStateRoot, _withdrawRoot, _dataHash) @@ -454,7 +467,15 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { uint256 _totalL1MessagesPoppedInBatch, uint256 _totalL1MessagesPoppedOverall, bytes calldata _skippedL1MessageBitmap - ) internal view returns (uint256 _totalNumL1MessagesInChunk) { + ) + internal + view + returns ( + uint256 _totalNumL1MessagesInChunk, + uint256 _lastAppliedL1BlockInChunk, + bytes32 _l1BlockRangeHashInChunk + ) + { uint256 chunkPtr; uint256 startDataPtr; uint256 dataPtr; @@ -481,7 +502,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { } } assembly { - mstore(0x40, add(dataPtr, mul(_totalTransactionsInChunk, 0x20))) // reserve memory for tx hashes + mstore(0x40, add(add(dataPtr, mul(_totalTransactionsInChunk, 0x20)), 0x28)) // reserve memory for tx hashes and l1 block hashes data } } @@ -492,6 +513,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { blockPtr := add(chunkPtr, 1) // reset block ptr } + uint256 _lastAppliedL1Block; // concatenate tx hashes uint256 l2TxPtr = ChunkCodec.l2TxPtr(chunkPtr, _numBlocks); while (_numBlocks > 0) { @@ -517,6 +539,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { } } + if (_numBlocks == 1) { + // check last block + _lastAppliedL1Block = ChunkCodec.lastAppliedL1BlockInBlock(blockPtr); + } + unchecked { _totalNumL1MessagesInChunk += _numL1MessagesInBlock; _totalL1MessagesPoppedInBatch += _numL1MessagesInBlock; @@ -527,11 +554,22 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { } } + uint256 _lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInChunk(l2TxPtr); + bytes32 _l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); + + require(_lastAppliedL1Block == _lastAppliedL1BlockInChunk, "incorrect lastAppliedL1Block in chunk"); + // check the actual number of transactions in the chunk require((dataPtr - txHashStartDataPtr) / 32 <= maxNumTxInChunk, "too many txs in one chunk"); - // check chunk has correct length - require(l2TxPtr - chunkPtr == _chunk.length, "incomplete l2 transaction data"); + assembly { + mstore(dataPtr, _lastAppliedL1BlockInChunk) + mstore(dataPtr, _l1BlockRangeHashInChunk) + dataPtr := add(dataPtr, 0x28) + } + + // check chunk has correct length. 40 is the length of lastAppliedL1Block and l1BlockRangeHash + require(l2TxPtr - chunkPtr + 40 == _chunk.length, "incomplete l2 transaction data"); // compute data hash and store to memory assembly { @@ -539,7 +577,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { mstore(memPtr, dataHash) } - return _totalNumL1MessagesInChunk; + return (_totalNumL1MessagesInChunk, _lastAppliedL1BlockInChunk, _l1BlockRangeHashInChunk); } /// @dev Internal function to load L1 message hashes from the message queue. diff --git a/contracts/src/libraries/codec/BatchHeaderV0Codec.sol b/contracts/src/libraries/codec/BatchHeaderV0Codec.sol index d27824b9ba..3afec82275 100644 --- a/contracts/src/libraries/codec/BatchHeaderV0Codec.sol +++ b/contracts/src/libraries/codec/BatchHeaderV0Codec.sol @@ -24,7 +24,7 @@ library BatchHeaderV0Codec { /// @return length The length in bytes of the batch header. 
function loadAndValidate(bytes calldata _batchHeader) internal pure returns (uint256 batchPtr, uint256 length) { length = _batchHeader.length; - require(length >= 97, "batch header length too small"); + require(length >= 129, "batch header length too small"); // copy batch header to memory. assembly { @@ -37,7 +37,7 @@ library BatchHeaderV0Codec { uint256 _l1MessagePopped = BatchHeaderV0Codec.l1MessagePopped(batchPtr); unchecked { - require(length == 97 + ((_l1MessagePopped + 255) / 256) * 32, "wrong bitmap length"); + require(length == 129 + ((_l1MessagePopped + 255) / 256) * 32, "wrong bitmap length"); } } @@ -190,12 +190,12 @@ library BatchHeaderV0Codec { } } - /// @notice Store the block range hash of batch header. + /// @notice Store the l1 block range hash of batch header. /// @param batchOffset The start memory offset of the batch header + dynamic offset. - /// @param _blockRangeHash The block range hash. - function storeBlockRangeHash(uint256 batchOffset, bytes32 _blockRangeHash) internal pure { + /// @param _l1BlockRangeHash The l1 block range hash. + function storeL1BlockRangeHash(uint256 batchOffset, bytes32 _l1BlockRangeHash) internal pure { assembly { - mstore(add(batchOffset, 97), _blockRangeHash) + mstore(add(batchOffset, 97), _l1BlockRangeHash) } } diff --git a/contracts/src/libraries/codec/ChunkCodec.sol b/contracts/src/libraries/codec/ChunkCodec.sol index 3dafc3f1c6..12caa24b45 100644 --- a/contracts/src/libraries/codec/ChunkCodec.sol +++ b/contracts/src/libraries/codec/ChunkCodec.sol @@ -65,11 +65,20 @@ library ChunkCodec { } /// @notice Return the number of last applied L1 block. - /// @param blockPtr The start memory offset of the block context in memory. + /// @param l2TxEndPtr The end memory offset of `l2Transactions`. /// @return _lastAppliedL1Block The number of last applied L1 block. - function lastAppliedL1Block(uint256 blockPtr) internal pure returns (uint256 _lastAppliedL1Block) { + function lastAppliedL1BlockInChunk(uint256 l2TxEndPtr) internal pure returns (uint256 _lastAppliedL1Block) { assembly { - _lastAppliedL1Block := shr(240, mload(add(blockPtr, 60))) + _lastAppliedL1Block := shr(248, mload(l2TxEndPtr)) + } + } + + /// @notice Return the number of last applied L1 block. + /// @param l2TxEndPtr The end memory offset of `l2Transactions`. + /// @return _l1BlockRangeHash The hash of the L1 block range. + function l1BlockRangeHashInChunk(uint256 l2TxEndPtr) internal pure returns (bytes32 _l1BlockRangeHash) { + assembly { + _l1BlockRangeHash := shr(224, mload(add(l2TxEndPtr, 8))) } } @@ -98,6 +107,15 @@ library ChunkCodec { return dstPtr; } + /// @notice Return the number of last applied L1 block. + /// @param blockPtr The start memory offset of the block context in memory. + /// @return _lastAppliedL1Block The number of last applied L1 block. + function lastAppliedL1BlockInBlock(uint256 blockPtr) internal pure returns (uint256 _lastAppliedL1Block) { + assembly { + _lastAppliedL1Block := shr(240, mload(add(blockPtr, 60))) + } + } + /// @notice Return the number of transactions in current block. /// @param blockPtr The start memory offset of the block context in memory. /// @return _numTransactions The number of transactions in current block. 
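For reference, a minimal off-chain sketch (not taken from these patches) of how the new chunk trailer and the L1 block range hash fit together. It assumes the trailer appended after the l2 transaction data is an 8-byte big-endian lastAppliedL1Block followed by the 32-byte l1BlockRangeHash (the 40 bytes read back by the ChunkCodec helpers above), and that the range hash is keccak256 over the concatenated block hashes, as computed by L1ViewOracle.blockRangeHash. The helper names (decodeChunkTrailer, blockRangeHash) and the sample inputs are hypothetical, for illustration only.

```go
// Illustrative sketch only, under the assumptions stated above.
package main

import (
	"encoding/binary"
	"errors"
	"fmt"

	"golang.org/x/crypto/sha3"
)

// decodeChunkTrailer reads the assumed 40-byte trailer of an encoded chunk:
// lastAppliedL1Block (uint64, big endian) followed by l1BlockRangeHash (32 bytes).
func decodeChunkTrailer(chunkBytes []byte) (lastAppliedL1Block uint64, l1BlockRangeHash [32]byte, err error) {
	if len(chunkBytes) < 40 {
		return 0, l1BlockRangeHash, errors.New("encoded chunk shorter than 40-byte trailer")
	}
	trailer := chunkBytes[len(chunkBytes)-40:]
	lastAppliedL1Block = binary.BigEndian.Uint64(trailer[:8])
	copy(l1BlockRangeHash[:], trailer[8:])
	return lastAppliedL1Block, l1BlockRangeHash, nil
}

// blockRangeHash mirrors the contract-side shape: keccak256 of the
// concatenated 32-byte block hashes of the range, in ascending order.
func blockRangeHash(blockHashes [][32]byte) [32]byte {
	hasher := sha3.NewLegacyKeccak256()
	for _, h := range blockHashes {
		hasher.Write(h[:])
	}
	var out [32]byte
	copy(out[:], hasher.Sum(nil))
	return out
}

func main() {
	// Hypothetical block hashes purely for demonstration.
	hashes := [][32]byte{{0x01}, {0x02}, {0x03}}
	fmt.Printf("l1 block range hash: %x\n", blockRangeHash(hashes))
}
```

The keccak-over-concatenation shape used here is the same property the L1ViewOracle test below asserts against the contract.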
diff --git a/contracts/src/test/L1ViewOracle.t.sol b/contracts/src/test/L1ViewOracle.t.sol index 630dadc220..ed6d734015 100644 --- a/contracts/src/test/L1ViewOracle.t.sol +++ b/contracts/src/test/L1ViewOracle.t.sol @@ -21,27 +21,27 @@ contract L1ViewOracleTest is DSTestPlus { uint256 from = block.number - 260; uint256 to = from + 5; - bytes32 hash = oracle.blockRangeHash(from, to); + oracle.blockRangeHash(from, to); } function testTooNewBlocks() external { - hevm.expectRevert("Block range exceeds current block"); + hevm.expectRevert("Incorrect from/to range"); hevm.roll(10); uint256 from = block.number - 5; uint256 to = block.number + 5; - bytes32 hash = oracle.blockRangeHash(from, to); + oracle.blockRangeHash(from, to); } function testInvalidRange() external { - hevm.expectRevert("End must be greater than or equal to start"); + hevm.expectRevert("Incorrect from/to range"); uint256 from = 200; uint256 to = 100; - bytes32 hash = oracle.blockRangeHash(from, to); + oracle.blockRangeHash(from, to); } function testCorrectness() external { @@ -49,19 +49,19 @@ contract L1ViewOracleTest is DSTestPlus { uint256 from = 15; uint256 to = 48; + bytes32[] memory blockHashes = new bytes32[](to - from + 1); + uint256 cnt = 0; - bytes32 expectedHash = 0; + bytes32 blockRangeHash = oracle.blockRangeHash(from, to); for (uint256 i = from; i <= to; i++) { bytes32 blockHash = blockhash(i); - require(blockHash != 0, "Blockhash not available"); - - expectedHash = keccak256(abi.encodePacked(expectedHash, blockHash)); + blockHashes[cnt++] = blockHash; } - bytes32 gotHash = oracle.blockRangeHash(from, to); + bytes32 expected = keccak256(abi.encodePacked(blockHashes)); - assertEq(expectedHash, gotHash); + assertEq(blockRangeHash, expected); } } diff --git a/rollup/internal/orm/batch.go b/rollup/internal/orm/batch.go index f4d81fd922..60b11d40ef 100644 --- a/rollup/internal/orm/batch.go +++ b/rollup/internal/orm/batch.go @@ -13,7 +13,6 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/log" - "golang.org/x/crypto/sha3" "gorm.io/gorm" ) @@ -273,13 +272,6 @@ func (o *Batch) InsertBatch(ctx context.Context, chunks []*types.Chunk, batchMet return nil, err } - hasher := sha3.NewLegacyKeccak256() - var l1BlockRangeHash common.Hash - for _, chunk := range chunks { - hasher.Write(chunk.L1BlockRangeHash.Bytes()) - } - copy(l1BlockRangeHash[:], hasher.Sum(nil)) - numChunks := len(chunks) lastChunkBlockNum := len(chunks[numChunks-1].Blocks) @@ -301,7 +293,7 @@ func (o *Batch) InsertBatch(ctx context.Context, chunks []*types.Chunk, batchMet TotalL1CommitGas: batchMeta.TotalL1CommitGas, TotalL1CommitCalldataSize: batchMeta.TotalL1CommitCalldataSize, LastAppliedL1Block: chunks[numChunks-1].LastAppliedL1Block, - L1BlockRangeHash: l1BlockRangeHash, + L1BlockRangeHash: batchHeader.L1BlockRangeHash(), } db := o.db From aaa6eca8c55d987435c8556ff8097dc4c6954977 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Mon, 27 Nov 2023 17:57:36 +0200 Subject: [PATCH 19/59] wip: add update chunk/batch prove/verify --- .../foundry/InitializeL1BridgeContracts.s.sol | 4 +- contracts/src/L1/rollup/IScrollChain.sol | 3 +- contracts/src/L1/rollup/ScrollChain.sol | 64 +++++++++++++----- .../libraries/codec/BatchHeaderV0Codec.sol | 67 ++++++++++++++----- contracts/src/libraries/codec/ChunkCodec.sol | 2 +- contracts/src/test/L1GatewayTestBase.t.sol | 7 +- contracts/src/test/ScrollChain.t.sol | 49 +++++++------- .../internal/controller/relayer/l2_relayer.go | 10 ++- rollup/internal/orm/chunk.go | 18 +++++ 9 
files changed, 164 insertions(+), 60 deletions(-) diff --git a/contracts/scripts/foundry/InitializeL1BridgeContracts.s.sol b/contracts/scripts/foundry/InitializeL1BridgeContracts.s.sol index e833b811ee..5482adaa77 100644 --- a/contracts/scripts/foundry/InitializeL1BridgeContracts.s.sol +++ b/contracts/scripts/foundry/InitializeL1BridgeContracts.s.sol @@ -27,6 +27,7 @@ contract InitializeL1BridgeContracts is Script { address L1_FINALIZE_SENDER_ADDRESS = vm.envAddress("L1_FINALIZE_SENDER_ADDRESS"); address L1_FEE_VAULT_ADDR = vm.envAddress("L1_FEE_VAULT_ADDR"); address L1_WETH_ADDR = vm.envAddress("L1_WETH_ADDR"); + address L1_VIEW_ORACLE_ADDR = vm.envAddress("L1_VIEW_ORACLE_ADDR"); address L1_WHITELIST_ADDR = vm.envAddress("L1_WHITELIST_ADDR"); address L1_SCROLL_CHAIN_PROXY_ADDR = vm.envAddress("L1_SCROLL_CHAIN_PROXY_ADDR"); @@ -61,7 +62,8 @@ contract InitializeL1BridgeContracts is Script { ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).initialize( L1_MESSAGE_QUEUE_PROXY_ADDR, L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR, - MAX_TX_IN_CHUNK + MAX_TX_IN_CHUNK, + L1_VIEW_ORACLE_ADDR ); ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addSequencer(L1_COMMIT_SENDER_ADDRESS); ScrollChain(L1_SCROLL_CHAIN_PROXY_ADDR).addProver(L1_FINALIZE_SENDER_ADDRESS); diff --git a/contracts/src/L1/rollup/IScrollChain.sol b/contracts/src/L1/rollup/IScrollChain.sol index 9f86c2046e..715f784017 100644 --- a/contracts/src/L1/rollup/IScrollChain.sol +++ b/contracts/src/L1/rollup/IScrollChain.sol @@ -61,7 +61,8 @@ interface IScrollChain { uint8 version, bytes calldata parentBatchHeader, bytes[] memory chunks, - bytes calldata skippedL1MessageBitmap + bytes calldata skippedL1MessageBitmap, + uint64 _prevLastAppliedL1Block ) external; /// @notice Revert a pending batch. diff --git a/contracts/src/L1/rollup/ScrollChain.sol b/contracts/src/L1/rollup/ScrollChain.sol index d9e885958b..047f014c7f 100644 --- a/contracts/src/L1/rollup/ScrollChain.sol +++ b/contracts/src/L1/rollup/ScrollChain.sol @@ -10,6 +10,7 @@ import {IScrollChain} from "./IScrollChain.sol"; import {BatchHeaderV0Codec} from "../../libraries/codec/BatchHeaderV0Codec.sol"; import {ChunkCodec} from "../../libraries/codec/ChunkCodec.sol"; import {IRollupVerifier} from "../../libraries/verifier/IRollupVerifier.sol"; +import {IL1ViewOracle} from "../L1ViewOracle.sol"; // solhint-disable no-inline-assembly // solhint-disable reason-string @@ -79,6 +80,9 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { /// @inheritdoc IScrollChain mapping(uint256 => bytes32) public override withdrawRoots; + /// @notice The address of L1ViewOracle. 
+ address public l1ViewOracle; + /********************** * Function Modifiers * **********************/ @@ -107,13 +111,15 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { function initialize( address _messageQueue, address _verifier, - uint256 _maxNumTxInChunk + uint256 _maxNumTxInChunk, + address _l1ViewOracle ) public initializer { OwnableUpgradeable.__Ownable_init(); messageQueue = _messageQueue; verifier = _verifier; maxNumTxInChunk = _maxNumTxInChunk; + l1ViewOracle = _l1ViewOracle; emit UpdateVerifier(address(0), _verifier); emit UpdateMaxNumTxInChunk(0, _maxNumTxInChunk); @@ -165,7 +171,8 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { uint8 _version, bytes calldata _parentBatchHeader, bytes[] memory _chunks, - bytes calldata _skippedL1MessageBitmap + bytes calldata _skippedL1MessageBitmap, + uint64 _prevLastAppliedL1Block ) external override OnlySequencer whenNotPaused { require(_version == 0, "invalid version"); @@ -202,12 +209,14 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { mstore(0x40, add(dataPtr, mul(_chunksLength, 32))) } - uint256 _lastAppliedL1Block; + uint64 _lastAppliedL1Block; uint256 _totalNumL1MessagesInChunk; - uint256 _lastAppliedL1BlockInChunk; + uint64 _lastAppliedL1BlockInChunk; bytes32 _l1BlockRangeHashInChunk; + // compute the data hash for each chunk uint256 _totalL1MessagesPoppedInBatch; + bytes32[] memory _l1BlockRangeHashes = new bytes32[](_chunksLength); for (uint256 i = 0; i < _chunksLength; i++) { (_totalNumL1MessagesInChunk, _lastAppliedL1BlockInChunk, _l1BlockRangeHashInChunk) = _commitChunk( dataPtr, @@ -217,6 +226,17 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { _skippedL1MessageBitmap ); + if (_prevLastAppliedL1Block != 0) { + bytes32 _l1BlockRangeHash = IL1ViewOracle(l1ViewOracle).blockRangeHash( + _prevLastAppliedL1Block, + _lastAppliedL1BlockInChunk + ); + + require(_l1BlockRangeHash == _l1BlockRangeHashInChunk, "incorrect l1 block range hash"); + _l1BlockRangeHashes[i] = _l1BlockRangeHashInChunk; + _prevLastAppliedL1Block = _lastAppliedL1BlockInChunk; + } + // if it is the last chunk, update the last applied L1 block if (i == _chunksLength - 1) { _lastAppliedL1Block = _lastAppliedL1BlockInChunk; @@ -247,6 +267,9 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { _batchIndex := add(_batchIndex, 1) // increase batch index } + bytes32 _l1BlockRangeHashInBatch = keccak256(abi.encodePacked(_l1BlockRangeHashes)); + uint256 _skippedL1MessageBitmapLength = _skippedL1MessageBitmap.length; + // store entries, the order matters BatchHeaderV0Codec.storeVersion(batchPtr, _version); BatchHeaderV0Codec.storeBatchIndex(batchPtr, _batchIndex); @@ -254,13 +277,12 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { BatchHeaderV0Codec.storeTotalL1MessagePopped(batchPtr, _totalL1MessagesPoppedOverall); BatchHeaderV0Codec.storeDataHash(batchPtr, _dataHash); BatchHeaderV0Codec.storeParentBatchHash(batchPtr, _parentBatchHash); - uint256 batchOffset = BatchHeaderV0Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap); - BatchHeaderV0Codec.storeLastAppliedL1Block(batchOffset, _lastAppliedL1Block); - // TODO: store l1BlockRangeHash - // BatchHeaderV0Codec.storeL1BlockRangeHash(batchOffset, _l1BlockRangeHashInBatch); + BatchHeaderV0Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap); + BatchHeaderV0Codec.storeLastAppliedL1Block(batchPtr, 
_skippedL1MessageBitmapLength, _lastAppliedL1Block); + BatchHeaderV0Codec.storeL1BlockRangeHash(batchPtr, _skippedL1MessageBitmapLength, _l1BlockRangeHashInBatch); // compute batch hash - bytes32 _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, 129 + _skippedL1MessageBitmap.length); + bytes32 _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, 129 + _skippedL1MessageBitmapLength); committedBatches[_batchIndex] = _batchHash; emit CommitBatch(_batchIndex, _batchHash); @@ -314,6 +336,10 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { bytes32 _dataHash = BatchHeaderV0Codec.dataHash(memPtr); uint256 _batchIndex = BatchHeaderV0Codec.batchIndex(memPtr); + uint256 _l1MessagePopped = BatchHeaderV0Codec.l1MessagePopped(memPtr); + uint256 _skippedBitmapLength = _l1MessagePopped * 256; + uint256 _lastAppliedL1Block = BatchHeaderV0Codec.lastAppliedL1Block(memPtr, _skippedBitmapLength); + bytes32 _l1BlockRangeHash = BatchHeaderV0Codec.l1BlockRangeHash(memPtr, _skippedBitmapLength); require(committedBatches[_batchIndex] == _batchHash, "incorrect batch hash"); // verify previous state root. @@ -325,7 +351,15 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { // TODO: add lastAppliedL1Block and l1BlockRangeHash // compute public input hash bytes32 _publicInputHash = keccak256( - abi.encodePacked(layer2ChainId, _prevStateRoot, _postStateRoot, _withdrawRoot, _dataHash) + abi.encodePacked( + layer2ChainId, + _prevStateRoot, + _postStateRoot, + _withdrawRoot, + _dataHash, + _lastAppliedL1Block, + _l1BlockRangeHash + ) ); // verify batch @@ -342,7 +376,6 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { withdrawRoots[_batchIndex] = _withdrawRoot; // Pop finalized and non-skipped message from L1MessageQueue. - uint256 _l1MessagePopped = BatchHeaderV0Codec.l1MessagePopped(memPtr); if (_l1MessagePopped > 0) { IL1MessageQueue _queue = IL1MessageQueue(messageQueue); @@ -472,7 +505,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { view returns ( uint256 _totalNumL1MessagesInChunk, - uint256 _lastAppliedL1BlockInChunk, + uint64 _lastAppliedL1BlockInChunk, bytes32 _l1BlockRangeHashInChunk ) { @@ -554,8 +587,8 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { } } - uint256 _lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInChunk(l2TxPtr); - bytes32 _l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); + _lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInChunk(l2TxPtr); + _l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); require(_lastAppliedL1Block == _lastAppliedL1BlockInChunk, "incorrect lastAppliedL1Block in chunk"); @@ -568,7 +601,8 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { dataPtr := add(dataPtr, 0x28) } - // check chunk has correct length. 40 is the length of lastAppliedL1Block and l1BlockRangeHash + // check chunk has correct length. + // 40 is the size of lastAppliedL1Block and l1BlockRangeHash. 
require(l2TxPtr - chunkPtr + 40 == _chunk.length, "incomplete l2 transaction data"); // compute data hash and store to memory diff --git a/contracts/src/libraries/codec/BatchHeaderV0Codec.sol b/contracts/src/libraries/codec/BatchHeaderV0Codec.sol index 3afec82275..a867ef710c 100644 --- a/contracts/src/libraries/codec/BatchHeaderV0Codec.sol +++ b/contracts/src/libraries/codec/BatchHeaderV0Codec.sol @@ -4,7 +4,7 @@ pragma solidity ^0.8.16; // solhint-disable no-inline-assembly -/// @dev Below is the encoding for `BatchHeader` V0, total 89 + ceil(l1MessagePopped / 256) * 32 bytes. +/// @dev Below is the encoding for `BatchHeader` V0, total 129 + ceil(l1MessagePopped / 256) * 32 bytes. /// ```text /// * Field Bytes Type Index Comments /// * version 1 uint8 0 The batch version @@ -106,6 +106,36 @@ library BatchHeaderV0Codec { } } + /// @notice Get the last applied L1 block number. + /// @param batchPtr The start memory offset of the batch header in memory. + /// @param _skippedBitmapLength The length of the skipped L1 message bitmap. + /// @return _lastAppliedL1Block The last applied L1 block number. + function lastAppliedL1Block(uint256 batchPtr, uint256 _skippedBitmapLength) + internal + pure + returns (uint256 _lastAppliedL1Block) + { + assembly { + batchPtr := add(batchPtr, 89) + _lastAppliedL1Block := mload(add(batchPtr, _skippedBitmapLength)) + } + } + + /// @notice Get the l1 block range hash. + /// @param batchPtr The start memory offset of the batch header in memory. + /// @param _skippedBitmapLength The length of the skipped L1 message bitmap. + /// @return _l1BlockRangeHash The l1 block range hash. + function l1BlockRangeHash(uint256 batchPtr, uint256 _skippedBitmapLength) + internal + pure + returns (bytes32 _l1BlockRangeHash) + { + assembly { + batchPtr := add(batchPtr, 97) + _l1BlockRangeHash := mload(add(batchPtr, _skippedBitmapLength)) + } + } + /// @notice Store the version of batch header. /// @param batchPtr The start memory offset of the batch header in memory. /// @param _version The version of batch header. @@ -169,33 +199,38 @@ library BatchHeaderV0Codec { /// @notice Store the skipped L1 message bitmap of batch header. /// @param batchPtr The start memory offset of the batch header in memory. /// @param _skippedL1MessageBitmap The skipped L1 message bitmap. - function storeSkippedBitmap(uint256 batchPtr, bytes calldata _skippedL1MessageBitmap) - internal - pure - returns (uint256 _offset) - { + function storeSkippedBitmap(uint256 batchPtr, bytes calldata _skippedL1MessageBitmap) internal pure { assembly { - _offset := add(batchPtr, 89) - calldatacopy(_offset, _skippedL1MessageBitmap.offset, _skippedL1MessageBitmap.length) - _offset := add(_offset, _skippedL1MessageBitmap.length) + calldatacopy(add(batchPtr, 89), _skippedL1MessageBitmap.offset, _skippedL1MessageBitmap.length) } } /// @notice Store the last applied L1 block number. - /// @param batchOffset The start memory offset of the batch header + dynamic offset. + /// @param batchPtr The start memory offset of the batch header in memory. + /// @param _skippedL1MessageBitmapLength The length of the skipped L1 message bitmap. /// @param _lastAppliedL1Block The last applied L1 block number. 
- function storeLastAppliedL1Block(uint256 batchOffset, uint256 _lastAppliedL1Block) internal pure { + function storeLastAppliedL1Block( + uint256 batchPtr, + uint256 _skippedL1MessageBitmapLength, + uint256 _lastAppliedL1Block + ) internal pure { assembly { - mstore(add(batchOffset, 89), shl(224, _lastAppliedL1Block)) + mstore(add(batchPtr, _skippedL1MessageBitmapLength), shl(224, _lastAppliedL1Block)) } } /// @notice Store the l1 block range hash of batch header. - /// @param batchOffset The start memory offset of the batch header + dynamic offset. + /// @param batchPtr The start memory offset of the batch header in memory. + /// @param _skippedL1MessageBitmapLength The length of the skipped L1 message bitmap. /// @param _l1BlockRangeHash The l1 block range hash. - function storeL1BlockRangeHash(uint256 batchOffset, bytes32 _l1BlockRangeHash) internal pure { - assembly { - mstore(add(batchOffset, 97), _l1BlockRangeHash) + function storeL1BlockRangeHash( + uint256 batchPtr, + uint256 _skippedL1MessageBitmapLength, + bytes32 _l1BlockRangeHash + ) internal pure { + assembly { + batchPtr := add(batchPtr, 8) + mstore(add(batchPtr, _skippedL1MessageBitmapLength), _l1BlockRangeHash) } } diff --git a/contracts/src/libraries/codec/ChunkCodec.sol b/contracts/src/libraries/codec/ChunkCodec.sol index 12caa24b45..a73efaf320 100644 --- a/contracts/src/libraries/codec/ChunkCodec.sol +++ b/contracts/src/libraries/codec/ChunkCodec.sol @@ -67,7 +67,7 @@ library ChunkCodec { /// @notice Return the number of last applied L1 block. /// @param l2TxEndPtr The end memory offset of `l2Transactions`. /// @return _lastAppliedL1Block The number of last applied L1 block. - function lastAppliedL1BlockInChunk(uint256 l2TxEndPtr) internal pure returns (uint256 _lastAppliedL1Block) { + function lastAppliedL1BlockInChunk(uint256 l2TxEndPtr) internal pure returns (uint64 _lastAppliedL1Block) { assembly { _lastAppliedL1Block := shr(248, mload(l2TxEndPtr)) } diff --git a/contracts/src/test/L1GatewayTestBase.t.sol b/contracts/src/test/L1GatewayTestBase.t.sol index 2d7f2c8adc..1f8a361146 100644 --- a/contracts/src/test/L1GatewayTestBase.t.sol +++ b/contracts/src/test/L1GatewayTestBase.t.sol @@ -10,6 +10,7 @@ import {EnforcedTxGateway} from "../L1/gateways/EnforcedTxGateway.sol"; import {L1MessageQueue} from "../L1/rollup/L1MessageQueue.sol"; import {L2GasPriceOracle} from "../L1/rollup/L2GasPriceOracle.sol"; import {ScrollChain, IScrollChain} from "../L1/rollup/ScrollChain.sol"; +import {L1ViewOracle} from "../L1/L1ViewOracle.sol"; import {Whitelist} from "../L2/predeploys/Whitelist.sol"; import {L1ScrollMessenger} from "../L1/L1ScrollMessenger.sol"; import {L2ScrollMessenger} from "../L2/L2ScrollMessenger.sol"; @@ -46,6 +47,7 @@ abstract contract L1GatewayTestBase is DSTestPlus { uint32 internal constant defaultGasLimit = 1000000; + L1ViewOracle internal l1ViewOracle; L1ScrollMessenger internal l1Messenger; L1MessageQueue internal messageQueue; L2GasPriceOracle internal gasOracle; @@ -71,6 +73,7 @@ abstract contract L1GatewayTestBase is DSTestPlus { feeVault = address(uint160(address(this)) - 1); // Deploy L1 contracts + l1ViewOracle = new L1ViewOracle(); l1Messenger = L1ScrollMessenger(payable(new ERC1967Proxy(address(new L1ScrollMessenger()), new bytes(0)))); messageQueue = L1MessageQueue(address(new ERC1967Proxy(address(new L1MessageQueue()), new bytes(0)))); gasOracle = L2GasPriceOracle(address(new ERC1967Proxy(address(new L2GasPriceOracle()), new bytes(0)))); @@ -95,7 +98,7 @@ abstract contract L1GatewayTestBase is 
DSTestPlus { ); gasOracle.initialize(1, 2, 1, 1); gasOracle.updateWhitelist(address(whitelist)); - rollup.initialize(address(messageQueue), address(verifier), 44); + rollup.initialize(address(messageQueue), address(verifier), 44, address(l1ViewOracle)); address[] memory _accounts = new address[](1); _accounts[0] = address(this); @@ -123,7 +126,7 @@ abstract contract L1GatewayTestBase is DSTestPlus { chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); - rollup.commitBatch(0, batchHeader0, chunks, new bytes(0)); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 0); hevm.stopPrank(); bytes memory batchHeader1 = new bytes(89); diff --git a/contracts/src/test/ScrollChain.t.sol b/contracts/src/test/ScrollChain.t.sol index a3b07b2934..7fbddd0e4f 100644 --- a/contracts/src/test/ScrollChain.t.sol +++ b/contracts/src/test/ScrollChain.t.sol @@ -7,6 +7,7 @@ import {DSTestPlus} from "solmate/test/utils/DSTestPlus.sol"; import {ERC1967Proxy} from "@openzeppelin/contracts/proxy/ERC1967/ERC1967Proxy.sol"; import {L1MessageQueue} from "../L1/rollup/L1MessageQueue.sol"; +import {L1ViewOracle} from "../L1/L1ViewOracle.sol"; import {ScrollChain, IScrollChain} from "../L1/rollup/ScrollChain.sol"; import {MockScrollChain} from "./mocks/MockScrollChain.sol"; @@ -25,17 +26,19 @@ contract ScrollChainTest is DSTestPlus { event FinalizeBatch(uint256 indexed batchIndex, bytes32 indexed batchHash, bytes32 stateRoot, bytes32 withdrawRoot); event RevertBatch(uint256 indexed batchIndex, bytes32 indexed batchHash); + L1ViewOracle private l1ViewOracle; ScrollChain private rollup; L1MessageQueue internal messageQueue; MockScrollChain internal chain; MockRollupVerifier internal verifier; function setUp() public { + l1ViewOracle = new L1ViewOracle(); messageQueue = L1MessageQueue(address(new ERC1967Proxy(address(new L1MessageQueue()), new bytes(0)))); rollup = ScrollChain(address(new ERC1967Proxy(address(new ScrollChain(233)), new bytes(0)))); verifier = new MockRollupVerifier(); - rollup.initialize(address(messageQueue), address(verifier), 100); + rollup.initialize(address(messageQueue), address(verifier), 100, address(l1ViewOracle)); messageQueue.initialize(address(this), address(rollup), address(0), address(0), 1000000); chain = new MockScrollChain(); @@ -46,7 +49,7 @@ contract ScrollChainTest is DSTestPlus { assertEq(rollup.layer2ChainId(), 233); hevm.expectRevert("Initializable: contract is already initialized"); - rollup.initialize(address(messageQueue), address(0), 100); + rollup.initialize(address(messageQueue), address(0), 100, address(l1ViewOracle)); } function testCommitBatch() public { @@ -65,32 +68,32 @@ contract ScrollChainTest is DSTestPlus { // caller not sequencer, revert hevm.expectRevert("caller not sequencer"); - rollup.commitBatch(0, batchHeader0, new bytes[](0), new bytes(0)); + rollup.commitBatch(0, batchHeader0, new bytes[](0), new bytes(0), 0); rollup.addSequencer(address(0)); // invalid version, revert hevm.startPrank(address(0)); hevm.expectRevert("invalid version"); - rollup.commitBatch(1, batchHeader0, new bytes[](0), new bytes(0)); + rollup.commitBatch(1, batchHeader0, new bytes[](0), new bytes(0), 0); hevm.stopPrank(); // batch is empty, revert hevm.startPrank(address(0)); hevm.expectRevert("batch is empty"); - rollup.commitBatch(0, batchHeader0, new bytes[](0), new bytes(0)); + rollup.commitBatch(0, batchHeader0, new bytes[](0), new bytes(0), 0); hevm.stopPrank(); // batch header length too small, revert 
hevm.startPrank(address(0)); hevm.expectRevert("batch header length too small"); - rollup.commitBatch(0, new bytes(88), new bytes[](1), new bytes(0)); + rollup.commitBatch(0, new bytes(88), new bytes[](1), new bytes(0), 0); hevm.stopPrank(); // wrong bitmap length, revert hevm.startPrank(address(0)); hevm.expectRevert("wrong bitmap length"); - rollup.commitBatch(0, new bytes(90), new bytes[](1), new bytes(0)); + rollup.commitBatch(0, new bytes(90), new bytes[](1), new bytes(0), 0); hevm.stopPrank(); // incorrect parent batch hash, revert @@ -99,7 +102,7 @@ contract ScrollChainTest is DSTestPlus { } hevm.startPrank(address(0)); hevm.expectRevert("incorrect parent batch hash"); - rollup.commitBatch(0, batchHeader0, new bytes[](1), new bytes(0)); + rollup.commitBatch(0, batchHeader0, new bytes[](1), new bytes(0), 0); hevm.stopPrank(); assembly { mstore(add(batchHeader0, add(0x20, 25)), 1) // change back @@ -113,7 +116,7 @@ contract ScrollChainTest is DSTestPlus { chunks[0] = chunk0; hevm.startPrank(address(0)); hevm.expectRevert("no block in chunk"); - rollup.commitBatch(0, batchHeader0, chunks, new bytes(0)); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 0); hevm.stopPrank(); // invalid chunk length, revert @@ -122,7 +125,7 @@ contract ScrollChainTest is DSTestPlus { chunks[0] = chunk0; hevm.startPrank(address(0)); hevm.expectRevert("invalid chunk length"); - rollup.commitBatch(0, batchHeader0, chunks, new bytes(0)); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 0); hevm.stopPrank(); // cannot skip last L1 message, revert @@ -135,7 +138,7 @@ contract ScrollChainTest is DSTestPlus { chunks[0] = chunk0; hevm.startPrank(address(0)); hevm.expectRevert("cannot skip last L1 message"); - rollup.commitBatch(0, batchHeader0, chunks, bitmap); + rollup.commitBatch(0, batchHeader0, chunks, bitmap, 0); hevm.stopPrank(); // num txs less than num L1 msgs, revert @@ -148,7 +151,7 @@ contract ScrollChainTest is DSTestPlus { chunks[0] = chunk0; hevm.startPrank(address(0)); hevm.expectRevert("num txs less than num L1 msgs"); - rollup.commitBatch(0, batchHeader0, chunks, bitmap); + rollup.commitBatch(0, batchHeader0, chunks, bitmap, 0); hevm.stopPrank(); // incomplete l2 transaction data, revert @@ -157,7 +160,7 @@ contract ScrollChainTest is DSTestPlus { chunks[0] = chunk0; hevm.startPrank(address(0)); hevm.expectRevert("incomplete l2 transaction data"); - rollup.commitBatch(0, batchHeader0, chunks, new bytes(0)); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 0); hevm.stopPrank(); // commit batch with one chunk, no tx, correctly @@ -165,14 +168,14 @@ contract ScrollChainTest is DSTestPlus { chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); - rollup.commitBatch(0, batchHeader0, chunks, new bytes(0)); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 0); hevm.stopPrank(); assertGt(uint256(rollup.committedBatches(1)), 0); // batch is already committed, revert hevm.startPrank(address(0)); hevm.expectRevert("batch already committed"); - rollup.commitBatch(0, batchHeader0, chunks, new bytes(0)); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 0); hevm.stopPrank(); } @@ -201,7 +204,7 @@ contract ScrollChainTest is DSTestPlus { chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); - rollup.commitBatch(0, batchHeader0, chunks, new bytes(0)); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 0); hevm.stopPrank(); 
assertGt(uint256(rollup.committedBatches(1)), 0); @@ -348,7 +351,7 @@ contract ScrollChainTest is DSTestPlus { hevm.startPrank(address(0)); hevm.expectEmit(true, true, false, true); emit CommitBatch(1, bytes32(0x00847173b29b238cf319cde79512b7c213e5a8b4138daa7051914c4592b6dfc7)); - rollup.commitBatch(0, batchHeader0, chunks, bitmap); + rollup.commitBatch(0, batchHeader0, chunks, bitmap, 0); hevm.stopPrank(); assertBoolEq(rollup.isBatchFinalized(1), false); bytes32 batchHash1 = rollup.committedBatches(1); @@ -475,19 +478,19 @@ contract ScrollChainTest is DSTestPlus { rollup.updateMaxNumTxInChunk(2); // 3 - 1 hevm.startPrank(address(0)); hevm.expectRevert("too many txs in one chunk"); - rollup.commitBatch(0, batchHeader1, chunks, bitmap); // first chunk with too many txs + rollup.commitBatch(0, batchHeader1, chunks, bitmap, 0); // first chunk with too many txs hevm.stopPrank(); rollup.updateMaxNumTxInChunk(185); // 5+10+300 - 2 - 127 hevm.startPrank(address(0)); hevm.expectRevert("too many txs in one chunk"); - rollup.commitBatch(0, batchHeader1, chunks, bitmap); // second chunk with too many txs + rollup.commitBatch(0, batchHeader1, chunks, bitmap, 0); // second chunk with too many txs hevm.stopPrank(); rollup.updateMaxNumTxInChunk(186); hevm.startPrank(address(0)); hevm.expectEmit(true, true, false, true); emit CommitBatch(2, bytes32(0x03a9cdcb9d582251acf60937db006ec99f3505fd4751b7c1f92c9a8ef413e873)); - rollup.commitBatch(0, batchHeader1, chunks, bitmap); + rollup.commitBatch(0, batchHeader1, chunks, bitmap, 0); hevm.stopPrank(); assertBoolEq(rollup.isBatchFinalized(2), false); bytes32 batchHash2 = rollup.committedBatches(2); @@ -558,7 +561,7 @@ contract ScrollChainTest is DSTestPlus { chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); - rollup.commitBatch(0, batchHeader0, chunks, new bytes(0)); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 0); hevm.stopPrank(); bytes memory batchHeader1 = new bytes(89); @@ -573,7 +576,7 @@ contract ScrollChainTest is DSTestPlus { // commit another batch hevm.startPrank(address(0)); - rollup.commitBatch(0, batchHeader1, chunks, new bytes(0)); + rollup.commitBatch(0, batchHeader1, chunks, new bytes(0), 0); hevm.stopPrank(); // count must be nonzero, revert @@ -678,7 +681,7 @@ contract ScrollChainTest is DSTestPlus { hevm.startPrank(address(0)); hevm.expectRevert("Pausable: paused"); - rollup.commitBatch(0, new bytes(0), new bytes[](0), new bytes(0)); + rollup.commitBatch(0, new bytes(0), new bytes[](0), new bytes(0), 0); hevm.expectRevert("Pausable: paused"); rollup.finalizeBatchWithProof(new bytes(0), bytes32(0), bytes32(0), bytes32(0), new bytes(0)); hevm.stopPrank(); diff --git a/rollup/internal/controller/relayer/l2_relayer.go b/rollup/internal/controller/relayer/l2_relayer.go index 35d8bd408c..4965a1c85b 100644 --- a/rollup/internal/controller/relayer/l2_relayer.go +++ b/rollup/internal/controller/relayer/l2_relayer.go @@ -355,6 +355,14 @@ func (r *Layer2Relayer) ProcessPendingBatches() { return } + parentBatchLatestChunk, err := r.chunkOrm.GetChunkByHash(r.ctx, dbChunks[0].ParentChunkHash) + if err != nil { + log.Error("Failed to fetch parent chunk", + "chunk index", dbChunks[0].Index, + "error", err) + return + } + encodedChunks := make([][]byte, len(dbChunks)) for i, c := range dbChunks { var wrappedBlocks []*types.WrappedBlock @@ -379,7 +387,7 @@ func (r *Layer2Relayer) ProcessPendingBatches() { encodedChunks[i] = chunkBytes } - calldata, err := r.l1RollupABI.Pack("commitBatch", 
currentBatchHeader.Version(), parentBatch.BatchHeader, encodedChunks, currentBatchHeader.SkippedL1MessageBitmap()) + calldata, err := r.l1RollupABI.Pack("commitBatch", currentBatchHeader.Version(), parentBatch.BatchHeader, encodedChunks, currentBatchHeader.SkippedL1MessageBitmap(), parentBatchLatestChunk.LastAppliedL1Block) if err != nil { log.Error("Failed to pack commitBatch", "index", batch.Index, "error", err) return diff --git a/rollup/internal/orm/chunk.go b/rollup/internal/orm/chunk.go index 49da23e485..ac6196791f 100644 --- a/rollup/internal/orm/chunk.go +++ b/rollup/internal/orm/chunk.go @@ -102,6 +102,24 @@ func (o *Chunk) GetLatestChunk(ctx context.Context) (*Chunk, error) { return &latestChunk, nil } +// GetChunkByHash retrieves the first chunk associated with a specific chunk hash. +func (o *Chunk) GetChunkByHash(ctx context.Context, hash string) (*Chunk, error) { + db := o.db.WithContext(ctx) + db = db.Model(&Chunk{}) + db = db.Where("hash", hash) + + var chunk Chunk + err := db.First(&chunk).Error + if err != nil && errors.Is(err, gorm.ErrRecordNotFound) { + return nil, nil + } + + if err != nil { + return nil, fmt.Errorf("Chunk.GetChunkByHash error: %w", err) + } + return &chunk, nil +} + // GetUnchunkedBlockHeight retrieves the first unchunked block number. func (o *Chunk) GetUnchunkedBlockHeight(ctx context.Context) (uint64, error) { // Get the latest chunk From a2f94743593557d7bc80279215447180189bce7b Mon Sep 17 00:00:00 2001 From: failfmi Date: Tue, 28 Nov 2023 17:14:32 +0200 Subject: [PATCH 20/59] compile(contracts): add temporary via-ir flag --- contracts/docs/apis/ScrollChain.md | 23 +++++++++++++++++++++-- contracts/foundry.toml | 1 + contracts/hardhat.config.ts | 1 + 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/contracts/docs/apis/ScrollChain.md b/contracts/docs/apis/ScrollChain.md index 1a4fe5a55d..32d3167041 100644 --- a/contracts/docs/apis/ScrollChain.md +++ b/contracts/docs/apis/ScrollChain.md @@ -45,7 +45,7 @@ Add an account to the sequencer list. ### commitBatch ```solidity -function commitBatch(uint8 _version, bytes _parentBatchHeader, bytes[] _chunks, bytes _skippedL1MessageBitmap) external nonpayable +function commitBatch(uint8 _version, bytes _parentBatchHeader, bytes[] _chunks, bytes _skippedL1MessageBitmap, uint64 _prevLastAppliedL1Block) external nonpayable ``` Commit a batch of transactions on layer 1. @@ -60,6 +60,7 @@ Commit a batch of transactions on layer 1. | _parentBatchHeader | bytes | undefined | | _chunks | bytes[] | undefined | | _skippedL1MessageBitmap | bytes | undefined | +| _prevLastAppliedL1Block | uint64 | undefined | ### committedBatches @@ -145,7 +146,7 @@ Import layer 2 genesis block ### initialize ```solidity -function initialize(address _messageQueue, address _verifier, uint256 _maxNumTxInChunk) external nonpayable +function initialize(address _messageQueue, address _verifier, uint256 _maxNumTxInChunk, address _l1ViewOracle) external nonpayable ``` @@ -159,6 +160,7 @@ function initialize(address _messageQueue, address _verifier, uint256 _maxNumTxI | _messageQueue | address | undefined | | _verifier | address | undefined | | _maxNumTxInChunk | uint256 | undefined | +| _l1ViewOracle | address | undefined | ### isBatchFinalized @@ -226,6 +228,23 @@ Whether an account is a sequencer. |---|---|---| | _0 | bool | undefined | +### l1ViewOracle + +```solidity +function l1ViewOracle() external view returns (address) +``` + +The address of L1ViewOracle. 
+ + + + +#### Returns + +| Name | Type | Description | +|---|---|---| +| _0 | address | undefined | + ### lastFinalizedBatchIndex ```solidity diff --git a/contracts/foundry.toml b/contracts/foundry.toml index ca3af0b164..da6a644fc0 100644 --- a/contracts/foundry.toml +++ b/contracts/foundry.toml @@ -11,6 +11,7 @@ force = true # whether to ignor evm_version = 'london' # the evm version (by hardfork name) solc_version = '0.8.16' # override for the solc version (setting this ignores `auto_detect_solc`) optimizer = true # enable or disable the solc optimizer +via_ir = true # enable via-ir optimizer_runs = 200 # the number of optimizer runs verbosity = 2 # the verbosity of tests ignored_error_codes = [] # a list of ignored solc error codes diff --git a/contracts/hardhat.config.ts b/contracts/hardhat.config.ts index 7fb93c7c2f..c981e18794 100644 --- a/contracts/hardhat.config.ts +++ b/contracts/hardhat.config.ts @@ -51,6 +51,7 @@ const config: HardhatUserConfig = { enabled: foundry.default?.optimizer || true, runs: foundry.default?.optimizer_runs || 200, }, + viaIR: true, }, }, networks: { From 450cf38888068f563a93e0b6756a888e94373d13 Mon Sep 17 00:00:00 2001 From: failfmi Date: Tue, 28 Nov 2023 17:37:26 +0200 Subject: [PATCH 21/59] build(coordinator): explicit amd64 platform --- build/dockerfiles/coordinator-api.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build/dockerfiles/coordinator-api.Dockerfile b/build/dockerfiles/coordinator-api.Dockerfile index 4ebdb14540..f9bb81b08a 100644 --- a/build/dockerfiles/coordinator-api.Dockerfile +++ b/build/dockerfiles/coordinator-api.Dockerfile @@ -39,7 +39,7 @@ COPY --from=zkp-builder /app/target/release/libzktrie.so ./coordinator/internal/ RUN cd ./coordinator && make coordinator_api_skip_libzkp && mv ./build/bin/coordinator_api /bin/coordinator_api && mv internal/logic/verifier/lib /bin/ # Pull coordinator into a second stage deploy alpine container -FROM ubuntu:20.04 +FROM --platform=linux/amd64 ubuntu:20.04 ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/lib # ENV CHAIN_ID=534353 RUN mkdir -p /src/coordinator/internal/logic/verifier/lib From d147e8560b300e95aa10f2034aa160ff44f2e5f9 Mon Sep 17 00:00:00 2001 From: failfmi Date: Tue, 28 Nov 2023 17:38:05 +0200 Subject: [PATCH 22/59] build(docker): coordinator and rollup image names --- coordinator/Makefile | 4 ++-- rollup/Makefile | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/coordinator/Makefile b/coordinator/Makefile index 893c51aefd..64f8b1dc6a 100644 --- a/coordinator/Makefile +++ b/coordinator/Makefile @@ -50,8 +50,8 @@ clean: ## Empty out the bin folder @rm -rf build/bin docker: - DOCKER_BUILDKIT=1 docker build -t scrolltech/coordinator-api:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/coordinator-api.Dockerfile - DOCKER_BUILDKIT=1 docker build -t scrolltech/coordinator-cron:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/coordinator-cron.Dockerfile + DOCKER_BUILDKIT=1 docker build -t limechain/coordinator-api:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/coordinator-api.Dockerfile + DOCKER_BUILDKIT=1 docker build -t limechain/coordinator-cron:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/coordinator-cron.Dockerfile docker_push: docker push scrolltech/coordinator-api:${IMAGE_VERSION} diff --git a/rollup/Makefile b/rollup/Makefile index e01f608919..dba1999bfe 100644 --- a/rollup/Makefile +++ b/rollup/Makefile 
@@ -36,6 +36,6 @@ docker_push: docker docker push scrolltech/rollup-relayer:${IMAGE_VERSION} docker: - DOCKER_BUILDKIT=1 docker build -t scrolltech/gas-oracle:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/gas_oracle.Dockerfile - DOCKER_BUILDKIT=1 docker build -t scrolltech/event-watcher:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/event_watcher.Dockerfile - DOCKER_BUILDKIT=1 docker build -t scrolltech/rollup-relayer:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/rollup_relayer.Dockerfile + DOCKER_BUILDKIT=1 docker build -t limechain/gas-oracle:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/gas_oracle.Dockerfile + DOCKER_BUILDKIT=1 docker build -t limechain/event-watcher:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/event_watcher.Dockerfile + DOCKER_BUILDKIT=1 docker build -t limechain/rollup-relayer:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/rollup_relayer.Dockerfile From cfce8b252a5cd96769f0e3ed8d597d295b328541 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Tue, 28 Nov 2023 17:56:20 +0200 Subject: [PATCH 23/59] fix: scroll l1 contract --- contracts/src/L1/rollup/ScrollChain.sol | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/contracts/src/L1/rollup/ScrollChain.sol b/contracts/src/L1/rollup/ScrollChain.sol index 047f014c7f..0260570e3c 100644 --- a/contracts/src/L1/rollup/ScrollChain.sol +++ b/contracts/src/L1/rollup/ScrollChain.sol @@ -228,7 +228,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { if (_prevLastAppliedL1Block != 0) { bytes32 _l1BlockRangeHash = IL1ViewOracle(l1ViewOracle).blockRangeHash( - _prevLastAppliedL1Block, + _prevLastAppliedL1Block + 1, _lastAppliedL1BlockInChunk ); @@ -348,7 +348,6 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { // avoid duplicated verification require(finalizedStateRoots[_batchIndex] == bytes32(0), "batch already verified"); - // TODO: add lastAppliedL1Block and l1BlockRangeHash // compute public input hash bytes32 _publicInputHash = keccak256( abi.encodePacked( From e99e26fab39ae3fd0a0baddb48d8636f3e34a357 Mon Sep 17 00:00:00 2001 From: failfmi Date: Thu, 30 Nov 2023 10:21:28 +0200 Subject: [PATCH 24/59] compile(contracts): remove via-ir flag --- contracts/foundry.toml | 1 - contracts/hardhat.config.ts | 1 - 2 files changed, 2 deletions(-) diff --git a/contracts/foundry.toml b/contracts/foundry.toml index da6a644fc0..ca3af0b164 100644 --- a/contracts/foundry.toml +++ b/contracts/foundry.toml @@ -11,7 +11,6 @@ force = true # whether to ignor evm_version = 'london' # the evm version (by hardfork name) solc_version = '0.8.16' # override for the solc version (setting this ignores `auto_detect_solc`) optimizer = true # enable or disable the solc optimizer -via_ir = true # enable via-ir optimizer_runs = 200 # the number of optimizer runs verbosity = 2 # the verbosity of tests ignored_error_codes = [] # a list of ignored solc error codes diff --git a/contracts/hardhat.config.ts b/contracts/hardhat.config.ts index c981e18794..7fb93c7c2f 100644 --- a/contracts/hardhat.config.ts +++ b/contracts/hardhat.config.ts @@ -51,7 +51,6 @@ const config: HardhatUserConfig = { enabled: foundry.default?.optimizer || true, runs: foundry.default?.optimizer_runs || 200, }, - viaIR: true, }, }, networks: { From 6507e1d0296400120c1385f31e4253301ce43e62 Mon Sep 17 00:00:00 2001 From: failfmi Date: Thu, 30 Nov 2023 
10:57:10 +0200 Subject: [PATCH 25/59] refactor(contracts/ScrollChain): stack too deep; fix ChunkCode.lastAppliedL1BlockInBlock to return uint64 instead of uint256 --- contracts/src/L1/rollup/ScrollChain.sol | 234 +++++++++++-------- contracts/src/libraries/codec/ChunkCodec.sol | 2 +- 2 files changed, 142 insertions(+), 94 deletions(-) diff --git a/contracts/src/L1/rollup/ScrollChain.sol b/contracts/src/L1/rollup/ScrollChain.sol index 0260570e3c..b6b1b4519a 100644 --- a/contracts/src/L1/rollup/ScrollChain.sol +++ b/contracts/src/L1/rollup/ScrollChain.sol @@ -83,6 +83,25 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { /// @notice The address of L1ViewOracle. address public l1ViewOracle; + // stack too deep + struct CommitChunksResult { + bytes32 dataHash; + uint256 totalL1MessagesPoppedOverall; + uint256 totalL1MessagesPoppedInBatch; + uint64 lastAppliedL1Block; + bytes32 l1BlockRangeHashInBatch; + } + + // stack too deep + struct ChunkResult { + // _totalNumL1MessagesInChunk The total number of L1 messages popped in current chunk + uint256 _totalNumL1MessagesInChunk; + // _lastAppliedL1BlockInChunk The last applied L1 Block Number in current chunk + uint64 _lastAppliedL1BlockInChunk; + // _l1BlockRangeHashInChunk The keccak256 of all the l1 block range hashes in current chunk + bytes32 _l1BlockRangeHashInChunk; + } + /********************** * Function Modifiers * **********************/ @@ -177,8 +196,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { require(_version == 0, "invalid version"); // check whether the batch is empty - uint256 _chunksLength = _chunks.length; - require(_chunksLength > 0, "batch is empty"); + require(_chunks.length > 0, "batch is empty"); // The overall memory layout in this function is organized as follows // +---------------------+-------------------+------------------+ @@ -202,87 +220,39 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { require(committedBatches[_batchIndex] == _parentBatchHash, "incorrect parent batch hash"); require(committedBatches[_batchIndex + 1] == 0, "batch already committed"); - // load `dataPtr` and reserve the memory region for chunk data hashes - uint256 dataPtr; - assembly { - dataPtr := mload(0x40) - mstore(0x40, add(dataPtr, mul(_chunksLength, 32))) - } - - uint64 _lastAppliedL1Block; - uint256 _totalNumL1MessagesInChunk; - uint64 _lastAppliedL1BlockInChunk; - bytes32 _l1BlockRangeHashInChunk; - - // compute the data hash for each chunk - uint256 _totalL1MessagesPoppedInBatch; - bytes32[] memory _l1BlockRangeHashes = new bytes32[](_chunksLength); - for (uint256 i = 0; i < _chunksLength; i++) { - (_totalNumL1MessagesInChunk, _lastAppliedL1BlockInChunk, _l1BlockRangeHashInChunk) = _commitChunk( - dataPtr, - _chunks[i], - _totalL1MessagesPoppedInBatch, - _totalL1MessagesPoppedOverall, - _skippedL1MessageBitmap - ); - - if (_prevLastAppliedL1Block != 0) { - bytes32 _l1BlockRangeHash = IL1ViewOracle(l1ViewOracle).blockRangeHash( - _prevLastAppliedL1Block + 1, - _lastAppliedL1BlockInChunk - ); - - require(_l1BlockRangeHash == _l1BlockRangeHashInChunk, "incorrect l1 block range hash"); - _l1BlockRangeHashes[i] = _l1BlockRangeHashInChunk; - _prevLastAppliedL1Block = _lastAppliedL1BlockInChunk; - } - - // if it is the last chunk, update the last applied L1 block - if (i == _chunksLength - 1) { - _lastAppliedL1Block = _lastAppliedL1BlockInChunk; - } - - unchecked { - _totalL1MessagesPoppedInBatch += _totalNumL1MessagesInChunk; - 
_totalL1MessagesPoppedOverall += _totalNumL1MessagesInChunk; - dataPtr += 32; - } - } - - // check the length of bitmap - unchecked { - require( - ((_totalL1MessagesPoppedInBatch + 255) / 256) * 32 == _skippedL1MessageBitmap.length, - "wrong bitmap length" - ); - } + CommitChunksResult memory chunksResult = _commitChunks( + _chunks, + _totalL1MessagesPoppedOverall, + _skippedL1MessageBitmap, + _prevLastAppliedL1Block + ); - // compute the data hash for current batch - bytes32 _dataHash; assembly { - let dataLen := mul(_chunksLength, 0x20) - _dataHash := keccak256(sub(dataPtr, dataLen), dataLen) - batchPtr := mload(0x40) // reset batchPtr _batchIndex := add(_batchIndex, 1) // increase batch index } - bytes32 _l1BlockRangeHashInBatch = keccak256(abi.encodePacked(_l1BlockRangeHashes)); - uint256 _skippedL1MessageBitmapLength = _skippedL1MessageBitmap.length; - // store entries, the order matters BatchHeaderV0Codec.storeVersion(batchPtr, _version); BatchHeaderV0Codec.storeBatchIndex(batchPtr, _batchIndex); - BatchHeaderV0Codec.storeL1MessagePopped(batchPtr, _totalL1MessagesPoppedInBatch); - BatchHeaderV0Codec.storeTotalL1MessagePopped(batchPtr, _totalL1MessagesPoppedOverall); - BatchHeaderV0Codec.storeDataHash(batchPtr, _dataHash); + BatchHeaderV0Codec.storeL1MessagePopped(batchPtr, chunksResult.totalL1MessagesPoppedInBatch); + BatchHeaderV0Codec.storeTotalL1MessagePopped(batchPtr, chunksResult.totalL1MessagesPoppedOverall); + BatchHeaderV0Codec.storeDataHash(batchPtr, chunksResult.dataHash); BatchHeaderV0Codec.storeParentBatchHash(batchPtr, _parentBatchHash); BatchHeaderV0Codec.storeSkippedBitmap(batchPtr, _skippedL1MessageBitmap); - BatchHeaderV0Codec.storeLastAppliedL1Block(batchPtr, _skippedL1MessageBitmapLength, _lastAppliedL1Block); - BatchHeaderV0Codec.storeL1BlockRangeHash(batchPtr, _skippedL1MessageBitmapLength, _l1BlockRangeHashInBatch); + BatchHeaderV0Codec.storeLastAppliedL1Block( + batchPtr, + _skippedL1MessageBitmap.length, + chunksResult.lastAppliedL1Block + ); + BatchHeaderV0Codec.storeL1BlockRangeHash( + batchPtr, + _skippedL1MessageBitmap.length, + chunksResult.l1BlockRangeHashInBatch + ); // compute batch hash - bytes32 _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, 129 + _skippedL1MessageBitmapLength); + bytes32 _batchHash = BatchHeaderV0Codec.computeBatchHash(batchPtr, 129 + _skippedL1MessageBitmap.length); committedBatches[_batchIndex] = _batchHash; emit CommitBatch(_batchIndex, _batchHash); @@ -486,28 +456,96 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { _batchHash = BatchHeaderV0Codec.computeBatchHash(memPtr, _length); } + function _commitChunks( + bytes[] memory _chunks, + uint256 _totalL1MessagesPoppedOverall, + bytes calldata _skippedL1MessageBitmap, + uint64 _prevLastAppliedL1Block + ) internal view returns (CommitChunksResult memory) { + uint256 _chunksLength = _chunks.length; + // load `dataPtr` and reserve the memory region for chunk data hashes + uint256 dataPtr; + assembly { + dataPtr := mload(0x40) + mstore(0x40, add(dataPtr, mul(_chunksLength, 32))) + } + + uint256 _totalL1MessagesPoppedInBatch; + uint64 _lastAppliedL1Block; + bytes32[] memory _l1BlockRangeHashes = new bytes32[](_chunksLength); + + for (uint256 i = 0; i < _chunksLength; i++) { + ChunkResult memory chunkResult = _commitChunk( + dataPtr, + _chunks[i], + _totalL1MessagesPoppedInBatch, + _totalL1MessagesPoppedOverall, + _skippedL1MessageBitmap + ); + + if (_prevLastAppliedL1Block != 0) { + bytes32 _l1BlockRangeHash = 
IL1ViewOracle(l1ViewOracle).blockRangeHash( + _prevLastAppliedL1Block + 1, + chunkResult._lastAppliedL1BlockInChunk + ); + + require(_l1BlockRangeHash == chunkResult._l1BlockRangeHashInChunk, "incorrect l1 block range hash"); + _l1BlockRangeHashes[i] = chunkResult._l1BlockRangeHashInChunk; + _prevLastAppliedL1Block = chunkResult._lastAppliedL1BlockInChunk; + } + + // if it is the last chunk, update the last applied L1 block + if (i == _chunksLength - 1) { + _lastAppliedL1Block = chunkResult._lastAppliedL1BlockInChunk; + } + + unchecked { + _totalL1MessagesPoppedInBatch += chunkResult._totalNumL1MessagesInChunk; + _totalL1MessagesPoppedOverall += chunkResult._totalNumL1MessagesInChunk; + dataPtr += 32; + } + } + + // check the length of bitmap + unchecked { + require( + ((_totalL1MessagesPoppedInBatch + 255) / 256) * 32 == _skippedL1MessageBitmap.length, + "wrong bitmap length" + ); + } + + // compute the data hash for current batch + bytes32 _dataHash; + assembly { + let dataLen := mul(_chunksLength, 0x20) + _dataHash := keccak256(sub(dataPtr, dataLen), dataLen) + } + + bytes32 _l1BlockRangeHashInBatch = keccak256(abi.encodePacked(_l1BlockRangeHashes)); + + return + CommitChunksResult({ + dataHash: _dataHash, + totalL1MessagesPoppedOverall: _totalL1MessagesPoppedOverall, + totalL1MessagesPoppedInBatch: _totalL1MessagesPoppedInBatch, + lastAppliedL1Block: _lastAppliedL1Block, + l1BlockRangeHashInBatch: _l1BlockRangeHashInBatch + }); + } + /// @dev Internal function to commit a chunk. /// @param memPtr The start memory offset to store list of `dataHash`. /// @param _chunk The encoded chunk to commit. /// @param _totalL1MessagesPoppedInBatch The total number of L1 messages popped in current batch. /// @param _totalL1MessagesPoppedOverall The total number of L1 messages popped in all batches including current batch. /// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not. 
- /// @return _totalNumL1MessagesInChunk The total number of L1 message popped in current chunk function _commitChunk( uint256 memPtr, bytes memory _chunk, uint256 _totalL1MessagesPoppedInBatch, uint256 _totalL1MessagesPoppedOverall, bytes calldata _skippedL1MessageBitmap - ) - internal - view - returns ( - uint256 _totalNumL1MessagesInChunk, - uint64 _lastAppliedL1BlockInChunk, - bytes32 _l1BlockRangeHashInChunk - ) - { + ) internal view returns (ChunkResult memory chunkResult) { uint256 chunkPtr; uint256 startDataPtr; uint256 dataPtr; @@ -545,7 +583,6 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { blockPtr := add(chunkPtr, 1) // reset block ptr } - uint256 _lastAppliedL1Block; // concatenate tx hashes uint256 l2TxPtr = ChunkCodec.l2TxPtr(chunkPtr, _numBlocks); while (_numBlocks > 0) { @@ -573,11 +610,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { if (_numBlocks == 1) { // check last block - _lastAppliedL1Block = ChunkCodec.lastAppliedL1BlockInBlock(blockPtr); + chunkResult._lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInBlock(blockPtr); } unchecked { - _totalNumL1MessagesInChunk += _numL1MessagesInBlock; + chunkResult._totalNumL1MessagesInChunk += _numL1MessagesInBlock; _totalL1MessagesPoppedInBatch += _numL1MessagesInBlock; _totalL1MessagesPoppedOverall += _numL1MessagesInBlock; @@ -586,31 +623,42 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { } } - _lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInChunk(l2TxPtr); - _l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); + // stack too deep + { + uint64 lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInChunk(l2TxPtr); + chunkResult._l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); - require(_lastAppliedL1Block == _lastAppliedL1BlockInChunk, "incorrect lastAppliedL1Block in chunk"); + require( + lastAppliedL1BlockInChunk == chunkResult._lastAppliedL1BlockInChunk, + "incorrect lastAppliedL1Block in chunk" + ); + } // check the actual number of transactions in the chunk require((dataPtr - txHashStartDataPtr) / 32 <= maxNumTxInChunk, "too many txs in one chunk"); - assembly { - mstore(dataPtr, _lastAppliedL1BlockInChunk) - mstore(dataPtr, _l1BlockRangeHashInChunk) - dataPtr := add(dataPtr, 0x28) - } - // check chunk has correct length. // 40 is the size of lastAppliedL1Block and l1BlockRangeHash. require(l2TxPtr - chunkPtr + 40 == _chunk.length, "incomplete l2 transaction data"); + // stack too deep + { + uint256 _lastAppliedL1BlockInChunk = chunkResult._lastAppliedL1BlockInChunk; + bytes32 _l1BlockRangeHashInChunk = chunkResult._l1BlockRangeHashInChunk; + assembly { + mstore(dataPtr, _lastAppliedL1BlockInChunk) + mstore(dataPtr, _l1BlockRangeHashInChunk) + dataPtr := add(dataPtr, 0x28) + } + } + // compute data hash and store to memory assembly { let dataHash := keccak256(startDataPtr, sub(dataPtr, startDataPtr)) mstore(memPtr, dataHash) } - return (_totalNumL1MessagesInChunk, _lastAppliedL1BlockInChunk, _l1BlockRangeHashInChunk); + return chunkResult; } /// @dev Internal function to load L1 message hashes from the message queue. diff --git a/contracts/src/libraries/codec/ChunkCodec.sol b/contracts/src/libraries/codec/ChunkCodec.sol index a73efaf320..0688ff642c 100644 --- a/contracts/src/libraries/codec/ChunkCodec.sol +++ b/contracts/src/libraries/codec/ChunkCodec.sol @@ -110,7 +110,7 @@ library ChunkCodec { /// @notice Return the number of last applied L1 block. 
/// @param blockPtr The start memory offset of the block context in memory. /// @return _lastAppliedL1Block The number of last applied L1 block. - function lastAppliedL1BlockInBlock(uint256 blockPtr) internal pure returns (uint256 _lastAppliedL1Block) { + function lastAppliedL1BlockInBlock(uint256 blockPtr) internal pure returns (uint64 _lastAppliedL1Block) { assembly { _lastAppliedL1Block := shr(240, mload(add(blockPtr, 60))) } From 74da93915f846823b0e3a6fe8c08be0da7d90b3a Mon Sep 17 00:00:00 2001 From: failfmi Date: Thu, 30 Nov 2023 11:22:03 +0200 Subject: [PATCH 26/59] inline-docs(contracts/ScrollChain): commitChunk return argument --- contracts/src/L1/rollup/ScrollChain.sol | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/contracts/src/L1/rollup/ScrollChain.sol b/contracts/src/L1/rollup/ScrollChain.sol index b6b1b4519a..803cf992ad 100644 --- a/contracts/src/L1/rollup/ScrollChain.sol +++ b/contracts/src/L1/rollup/ScrollChain.sol @@ -539,13 +539,15 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { /// @param _totalL1MessagesPoppedInBatch The total number of L1 messages popped in current batch. /// @param _totalL1MessagesPoppedOverall The total number of L1 messages popped in all batches including current batch. /// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not. + /// @return _chunkResult Contains the total number of L1 message popped, the last applied l1 block + /// and the keccak256 of the block range hashes for the current chunk. function _commitChunk( uint256 memPtr, bytes memory _chunk, uint256 _totalL1MessagesPoppedInBatch, uint256 _totalL1MessagesPoppedOverall, bytes calldata _skippedL1MessageBitmap - ) internal view returns (ChunkResult memory chunkResult) { + ) internal view returns (ChunkResult memory _chunkResult) { uint256 chunkPtr; uint256 startDataPtr; uint256 dataPtr; @@ -610,11 +612,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { if (_numBlocks == 1) { // check last block - chunkResult._lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInBlock(blockPtr); + _chunkResult._lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInBlock(blockPtr); } unchecked { - chunkResult._totalNumL1MessagesInChunk += _numL1MessagesInBlock; + _chunkResult._totalNumL1MessagesInChunk += _numL1MessagesInBlock; _totalL1MessagesPoppedInBatch += _numL1MessagesInBlock; _totalL1MessagesPoppedOverall += _numL1MessagesInBlock; @@ -626,10 +628,10 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { // stack too deep { uint64 lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInChunk(l2TxPtr); - chunkResult._l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); + _chunkResult._l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); require( - lastAppliedL1BlockInChunk == chunkResult._lastAppliedL1BlockInChunk, + lastAppliedL1BlockInChunk == _chunkResult._lastAppliedL1BlockInChunk, "incorrect lastAppliedL1Block in chunk" ); } @@ -643,8 +645,8 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { // stack too deep { - uint256 _lastAppliedL1BlockInChunk = chunkResult._lastAppliedL1BlockInChunk; - bytes32 _l1BlockRangeHashInChunk = chunkResult._l1BlockRangeHashInChunk; + uint256 _lastAppliedL1BlockInChunk = _chunkResult._lastAppliedL1BlockInChunk; + bytes32 _l1BlockRangeHashInChunk = _chunkResult._l1BlockRangeHashInChunk; assembly { mstore(dataPtr, 
_lastAppliedL1BlockInChunk) mstore(dataPtr, _l1BlockRangeHashInChunk) @@ -658,7 +660,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { mstore(memPtr, dataHash) } - return chunkResult; + return _chunkResult; } /// @dev Internal function to load L1 message hashes from the message queue. From 0616d6cd82a3fd88cf1e1e5427e01e9ca954472d Mon Sep 17 00:00:00 2001 From: failfmi Date: Fri, 1 Dec 2023 11:47:09 +0200 Subject: [PATCH 27/59] docker: switch push org name --- coordinator/Makefile | 4 ++-- rollup/Makefile | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/coordinator/Makefile b/coordinator/Makefile index 64f8b1dc6a..4362fc9426 100644 --- a/coordinator/Makefile +++ b/coordinator/Makefile @@ -54,5 +54,5 @@ docker: DOCKER_BUILDKIT=1 docker build -t limechain/coordinator-cron:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/coordinator-cron.Dockerfile docker_push: - docker push scrolltech/coordinator-api:${IMAGE_VERSION} - docker push scrolltech/coordinator-cron:${IMAGE_VERSION} \ No newline at end of file + docker push limechain/coordinator-api:${IMAGE_VERSION} + docker push limechain/coordinator-cron:${IMAGE_VERSION} \ No newline at end of file diff --git a/rollup/Makefile b/rollup/Makefile index dba1999bfe..e865cb0924 100644 --- a/rollup/Makefile +++ b/rollup/Makefile @@ -31,9 +31,9 @@ clean: ## Empty out the bin folder @rm -rf build/bin docker_push: - docker docker push scrolltech/gas-oracle:${IMAGE_VERSION} - docker docker push scrolltech/event-watcher:${IMAGE_VERSION} - docker docker push scrolltech/rollup-relayer:${IMAGE_VERSION} + docker push limechain/gas-oracle:${IMAGE_VERSION} + docker push limechain/event-watcher:${IMAGE_VERSION} + docker push limechain/rollup-relayer:${IMAGE_VERSION} docker: DOCKER_BUILDKIT=1 docker build -t limechain/gas-oracle:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/gas_oracle.Dockerfile From aa05f80a74e47c9942fe7fe51e120661141e6504 Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 6 Dec 2023 11:18:53 +0200 Subject: [PATCH 28/59] fix(rollup): batch l1 block range hash --- rollup/internal/orm/batch.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rollup/internal/orm/batch.go b/rollup/internal/orm/batch.go index 60b11d40ef..20057f1da6 100644 --- a/rollup/internal/orm/batch.go +++ b/rollup/internal/orm/batch.go @@ -35,7 +35,7 @@ type Batch struct { BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"` LastAppliedL1Block uint64 `json:"last_applied_l1_block"` - L1BlockRangeHash common.Hash `json:"l1_block_range_hash"` + L1BlockRangeHash string `json:"l1_block_range_hash"` // proof ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"` @@ -293,7 +293,7 @@ func (o *Batch) InsertBatch(ctx context.Context, chunks []*types.Chunk, batchMet TotalL1CommitGas: batchMeta.TotalL1CommitGas, TotalL1CommitCalldataSize: batchMeta.TotalL1CommitCalldataSize, LastAppliedL1Block: chunks[numChunks-1].LastAppliedL1Block, - L1BlockRangeHash: batchHeader.L1BlockRangeHash(), + L1BlockRangeHash: batchHeader.L1BlockRangeHash().Hex(), } db := o.db From 02ed8527f9dbe6928e7464cf256f74a8c801cb8d Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 6 Dec 2023 13:52:34 +0200 Subject: [PATCH 29/59] build(docker/rollup-relayer): explicit platform --- build/dockerfiles/rollup_relayer.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build/dockerfiles/rollup_relayer.Dockerfile 
b/build/dockerfiles/rollup_relayer.Dockerfile index 952f45f674..8197130d41 100644 --- a/build/dockerfiles/rollup_relayer.Dockerfile +++ b/build/dockerfiles/rollup_relayer.Dockerfile @@ -20,7 +20,7 @@ RUN --mount=target=. \ cd /src/rollup/cmd/rollup_relayer/ && go build -v -p 4 -o /bin/rollup_relayer # Pull rollup_relayer into a second stage deploy alpine container -FROM alpine:latest +FROM --platform=linux/amd64 alpine:latest COPY --from=builder /bin/rollup_relayer /bin/ From 16ff8180910fdd7a904292405d08ef7fbc269fc9 Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 6 Dec 2023 16:45:01 +0200 Subject: [PATCH 30/59] feat(rollup): use forked go-ethereum --- go.work.sum | 16 +++++++++++++++- rollup/go.mod | 2 ++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/go.work.sum b/go.work.sum index f0b658d87c..766f509ff1 100644 --- a/go.work.sum +++ b/go.work.sum @@ -526,6 +526,8 @@ github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3 github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= +github.com/limechain/scroll-go-ethereum v0.0.0-20231128150717-8a4a05ac3f85 h1:kjmwYeYgZuw2P/wLFlYJJnTLO4H4a4kRsrj7I4Fwjag= +github.com/limechain/scroll-go-ethereum v0.0.0-20231128150717-8a4a05ac3f85/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= @@ -585,7 +587,6 @@ github.com/opencontainers/runc v1.1.7/go.mod h1:CbUumNnWCuTGFukNXahoo/RFBZvDAgRh github.com/ory/dockertest/v3 v3.9.1 h1:v4dkG+dlu76goxMiTT2j8zV7s4oPPEppKT8K8p2f1kY= github.com/ory/dockertest/v3 v3.9.1/go.mod h1:42Ir9hmvaAPm0Mgibk6mBPi7SFvTXxEcnztDYOJ//uM= github.com/ory/dockertest/v3 v3.10.0/go.mod h1:nr57ZbRWMqfsdGdFNLHz5jjNdDb7VVFnzAeW1n5N1Lg= -github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= github.com/paulbellamy/ratecounter v0.2.0 h1:2L/RhJq+HA8gBQImDXtLPrDXK5qAj6ozWVK/zFXVJGs= github.com/paulmach/orb v0.7.1 h1:Zha++Z5OX/l168sqHK3k4z18LDvr+YAO/VjK0ReQ9rU= github.com/paulmach/orb v0.7.1/go.mod h1:FWRlTgl88VI1RBx/MkrwWDRhQ96ctqMCh8boXhmqB/A= @@ -730,6 +731,8 @@ golang.org/x/crypto v0.0.0-20211117183948-ae814b36b871/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= +golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= @@ -760,12 +763,17 @@ golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net 
v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= +golang.org/x/net v0.16.0 h1:7eBu7KsSvFDtSXUIDbh3aqlK4DPsZ1rByC8PFfBThos= +golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.3.0 h1:6l90koy8/LaBLmLu8jpHeHexzMwEita0zFfYlggy2F8= golang.org/x/oauth2 v0.3.0/go.mod h1:rQrIauxkUhJ6CuwEXwymO2/eh4xz2ZWF1nBkcxS+tGk= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ= +golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -792,14 +800,20 @@ golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c= golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= +golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= diff --git a/rollup/go.mod b/rollup/go.mod index f576892cf4..0669218fe1 100644 --- a/rollup/go.mod +++ b/rollup/go.mod @@ -86,3 +86,5 @@ require ( gopkg.in/natefinch/npipe.v2 
v2.0.0-20160621034901-c1b8fa8bdcce // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) + +replace github.com/scroll-tech/go-ethereum => github.com/limechain/scroll-go-ethereum 8a4a05ac3f85dd674f96df83181105d47c0a76d0 From a0d59121f9cd9c74bf9eaaeaace1dd5eeaa7c2ba Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 6 Dec 2023 17:28:49 +0200 Subject: [PATCH 31/59] fix(contracts/l1vieworacle): remove from check --- contracts/src/L1/L1ViewOracle.sol | 1 - 1 file changed, 1 deletion(-) diff --git a/contracts/src/L1/L1ViewOracle.sol b/contracts/src/L1/L1ViewOracle.sol index abaae0e767..0eeb1d5646 100644 --- a/contracts/src/L1/L1ViewOracle.sol +++ b/contracts/src/L1/L1ViewOracle.sol @@ -12,7 +12,6 @@ contract L1ViewOracle is IL1ViewOracle { * @return hash_ The keccak hash of all blockhashes in the provided range. */ function blockRangeHash(uint256 _from, uint256 _to) external view returns (bytes32 hash_) { - require(_from > 0, "Incorrect from/to range"); require(_to >= _from, "Incorrect from/to range"); require(_to < block.number, "Incorrect from/to range"); From dbeed30aa8951a61871c28f49263c70ed6c2fcb2 Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 6 Dec 2023 17:29:22 +0200 Subject: [PATCH 32/59] feat(rollup): log chunk process block range --- rollup/internal/controller/watcher/chunk_proposer.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rollup/internal/controller/watcher/chunk_proposer.go b/rollup/internal/controller/watcher/chunk_proposer.go index a48a0334a8..4a2ed0f883 100644 --- a/rollup/internal/controller/watcher/chunk_proposer.go +++ b/rollup/internal/controller/watcher/chunk_proposer.go @@ -242,7 +242,7 @@ func (p *ChunkProposer) proposeChunk(parentChunk *orm.Chunk) (*types.Chunk, erro l1BlockRangeHash, err := p.GetL1BlockRangeHash(p.ctx, l1BlockRangeHashFrom, lastAppliedL1Block) if err != nil { - log.Error("failed to get l1 block range hash", "err", err) + log.Error("failed to get l1 block range hash", "from", l1BlockRangeHashFrom, "to", lastAppliedL1Block, "err", err) return nil, fmt.Errorf("chunk-proposer failed to get l1 block range hash error: %w", err) } From ce160122806bf2a515a1421ea01972e3bd7e5ab7 Mon Sep 17 00:00:00 2001 From: failfmi Date: Thu, 7 Dec 2023 11:15:22 +0200 Subject: [PATCH 33/59] fix(rollup/chunk-proposer): blockRangeHash parse result --- rollup/internal/controller/watcher/chunk_proposer.go | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/rollup/internal/controller/watcher/chunk_proposer.go b/rollup/internal/controller/watcher/chunk_proposer.go index 4a2ed0f883..25dad018b0 100644 --- a/rollup/internal/controller/watcher/chunk_proposer.go +++ b/rollup/internal/controller/watcher/chunk_proposer.go @@ -390,11 +390,17 @@ func (p *ChunkProposer) GetL1BlockRangeHash(ctx context.Context, from uint64, to } } - var l1BlockRangeHash common.Hash - err = p.l1ViewOracleABI.UnpackIntoInterface(l1BlockRangeHash, "blockRangeHash", output) + result, err := p.l1ViewOracleABI.Unpack("blockRangeHash", output) if err != nil { return nil, err } + b, ok := result[0].([32]byte) + if !ok { + return nil, fmt.Errorf("could not cast block range hash to [32]byte") + } + + l1BlockRangeHash := common.Hash(b) + return &l1BlockRangeHash, nil } From 7fafc28d05615f58e3cca56bc5d02ef79de5afe9 Mon Sep 17 00:00:00 2001 From: reo101 Date: Thu, 7 Dec 2023 17:56:46 +0200 Subject: [PATCH 34/59] feat(rollup)!: rebuild `ScrollChain` contract ABI --- rollup/abi/bridge_abi.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/rollup/abi/bridge_abi.go b/rollup/abi/bridge_abi.go index 30e02cf717..8eea1f0d6f 100644 --- a/rollup/abi/bridge_abi.go +++ b/rollup/abi/bridge_abi.go @@ -71,7 +71,7 @@ func init() { // ScrollChainMetaData contains all meta data concerning the ScrollChain contract. var ScrollChainMetaData = &bind.MetaData{ - ABI: "[{\"inputs\":[{\"internalType\":\"uint64\",\"name\":\"_chainId\",\"type\":\"uint64\"}],\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"batchIndex\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"batchHash\",\"type\":\"bytes32\"}],\"name\":\"CommitBatch\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"batchIndex\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"batchHash\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"bytes32\",\"name\":\"stateRoot\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"bytes32\",\"name\":\"withdrawRoot\",\"type\":\"bytes32\"}],\"name\":\"FinalizeBatch\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint8\",\"name\":\"version\",\"type\":\"uint8\"}],\"name\":\"Initialized\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"previousOwner\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"newOwner\",\"type\":\"address\"}],\"name\":\"OwnershipTransferred\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"address\",\"name\":\"account\",\"type\":\"address\"}],\"name\":\"Paused\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"batchIndex\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"batchHash\",\"type\":\"bytes32\"}],\"name\":\"RevertBatch\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"address\",\"name\":\"account\",\"type\":\"address\"}],\"name\":\"Unpaused\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"oldMaxNumTxInChunk\",\"type\":\"uint256\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"newMaxNumTxInChunk\",\"type\":\"uint256\"}],\"name\":\"UpdateMaxNumTxInChunk\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"account\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"bool\",\"name\":\"status\",\"type\":\"bool\"}],\"name\":\"UpdateProver\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"account\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"bool\",\"name\":\"status\",\"type\":\"bool\"}],\"name\":\"UpdateSequencer\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"oldVerifier\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"newVerifier\",\"type\":\"address\"}],\"name\":\"UpdateVerifier\",\"type\":\"event\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_account\",\"type\":\"address\"}],\"name\":\"addProver\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_account\",\"type\":\"
address\"}],\"name\":\"addSequencer\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint8\",\"name\":\"_version\",\"type\":\"uint8\"},{\"internalType\":\"bytes\",\"name\":\"_parentBatchHeader\",\"type\":\"bytes\"},{\"internalType\":\"bytes[]\",\"name\":\"_chunks\",\"type\":\"bytes[]\"},{\"internalType\":\"bytes\",\"name\":\"_skippedL1MessageBitmap\",\"type\":\"bytes\"}],\"name\":\"commitBatch\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"committedBatches\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"\",\"type\":\"bytes32\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes\",\"name\":\"_batchHeader\",\"type\":\"bytes\"},{\"internalType\":\"bytes32\",\"name\":\"_prevStateRoot\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"_postStateRoot\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"_withdrawRoot\",\"type\":\"bytes32\"}],\"name\":\"finalizeBatch\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes\",\"name\":\"_batchHeader\",\"type\":\"bytes\"},{\"internalType\":\"bytes32\",\"name\":\"_prevStateRoot\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"_postStateRoot\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"_withdrawRoot\",\"type\":\"bytes32\"},{\"internalType\":\"bytes\",\"name\":\"_aggrProof\",\"type\":\"bytes\"}],\"name\":\"finalizeBatchWithProof\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"finalizedStateRoots\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"\",\"type\":\"bytes32\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes\",\"name\":\"_batchHeader\",\"type\":\"bytes\"},{\"internalType\":\"bytes32\",\"name\":\"_stateRoot\",\"type\":\"bytes32\"}],\"name\":\"importGenesisBatch\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_messageQueue\",\"type\":\"address\"},{\"internalType\":\"address\",\"name\":\"_verifier\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"_maxNumTxInChunk\",\"type\":\"uint256\"}],\"name\":\"initialize\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"_batchIndex\",\"type\":\"uint256\"}],\"name\":\"isBatchFinalized\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"name\":\"isProver\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"name\":\"isSequencer\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"lastFinalizedBatchIndex\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"layer2ChainId\",\"outputs\":[{\"internalType\":\"uint64\",\"name\":\"\
",\"type\":\"uint64\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"maxNumTxInChunk\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"messageQueue\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"owner\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"paused\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_account\",\"type\":\"address\"}],\"name\":\"removeProver\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_account\",\"type\":\"address\"}],\"name\":\"removeSequencer\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"renounceOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes\",\"name\":\"_batchHeader\",\"type\":\"bytes\"},{\"internalType\":\"uint256\",\"name\":\"_count\",\"type\":\"uint256\"}],\"name\":\"revertBatch\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bool\",\"name\":\"_status\",\"type\":\"bool\"}],\"name\":\"setPause\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"newOwner\",\"type\":\"address\"}],\"name\":\"transferOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"_maxNumTxInChunk\",\"type\":\"uint256\"}],\"name\":\"updateMaxNumTxInChunk\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_newVerifier\",\"type\":\"address\"}],\"name\":\"updateVerifier\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"verifier\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"withdrawRoots\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"\",\"type\":\"bytes32\"}],\"stateMutability\":\"view\",\"type\":\"function\"}]", + ABI: 
"[{\"inputs\":[{\"internalType\":\"uint64\",\"name\":\"_chainId\",\"type\":\"uint64\"}],\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"batchIndex\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"batchHash\",\"type\":\"bytes32\"}],\"name\":\"CommitBatch\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"batchIndex\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"batchHash\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"bytes32\",\"name\":\"stateRoot\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"bytes32\",\"name\":\"withdrawRoot\",\"type\":\"bytes32\"}],\"name\":\"FinalizeBatch\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint8\",\"name\":\"version\",\"type\":\"uint8\"}],\"name\":\"Initialized\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"previousOwner\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"newOwner\",\"type\":\"address\"}],\"name\":\"OwnershipTransferred\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"address\",\"name\":\"account\",\"type\":\"address\"}],\"name\":\"Paused\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"batchIndex\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"batchHash\",\"type\":\"bytes32\"}],\"name\":\"RevertBatch\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"address\",\"name\":\"account\",\"type\":\"address\"}],\"name\":\"Unpaused\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"oldMaxNumTxInChunk\",\"type\":\"uint256\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"newMaxNumTxInChunk\",\"type\":\"uint256\"}],\"name\":\"UpdateMaxNumTxInChunk\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"account\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"bool\",\"name\":\"status\",\"type\":\"bool\"}],\"name\":\"UpdateProver\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"account\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"bool\",\"name\":\"status\",\"type\":\"bool\"}],\"name\":\"UpdateSequencer\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"oldVerifier\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"newVerifier\",\"type\":\"address\"}],\"name\":\"UpdateVerifier\",\"type\":\"event\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_account\",\"type\":\"address\"}],\"name\":\"addProver\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_account\",\"type\":\"address\"}],\"name\":\"addSequencer\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint8\",\"name\":\"_version\",\"type\":\"uint8\"},{\"internalType\":\"bytes\",\"name\":\"_parentBatchHeader\",\"type\":\"bytes\"},{\"internalType\":\"bytes[]\",\"name\":\"_
chunks\",\"type\":\"bytes[]\"},{\"internalType\":\"bytes\",\"name\":\"_skippedL1MessageBitmap\",\"type\":\"bytes\"},{\"internalType\":\"uint64\",\"name\":\"_prevLastAppliedL1Block\",\"type\":\"uint64\"}],\"name\":\"commitBatch\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"committedBatches\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"\",\"type\":\"bytes32\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes\",\"name\":\"_batchHeader\",\"type\":\"bytes\"},{\"internalType\":\"bytes32\",\"name\":\"_prevStateRoot\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"_postStateRoot\",\"type\":\"bytes32\"},{\"internalType\":\"bytes32\",\"name\":\"_withdrawRoot\",\"type\":\"bytes32\"},{\"internalType\":\"bytes\",\"name\":\"_aggrProof\",\"type\":\"bytes\"}],\"name\":\"finalizeBatchWithProof\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"finalizedStateRoots\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"\",\"type\":\"bytes32\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes\",\"name\":\"_batchHeader\",\"type\":\"bytes\"},{\"internalType\":\"bytes32\",\"name\":\"_stateRoot\",\"type\":\"bytes32\"}],\"name\":\"importGenesisBatch\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_messageQueue\",\"type\":\"address\"},{\"internalType\":\"address\",\"name\":\"_verifier\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"_maxNumTxInChunk\",\"type\":\"uint256\"},{\"internalType\":\"address\",\"name\":\"_l1ViewOracle\",\"type\":\"address\"}],\"name\":\"initialize\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"_batchIndex\",\"type\":\"uint256\"}],\"name\":\"isBatchFinalized\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"name\":\"isProver\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"name\":\"isSequencer\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"l1ViewOracle\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"lastFinalizedBatchIndex\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"layer2ChainId\",\"outputs\":[{\"internalType\":\"uint64\",\"name\":\"\",\"type\":\"uint64\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"maxNumTxInChunk\",\"outputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"messageQueue\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"
type\":\"function\"},{\"inputs\":[],\"name\":\"owner\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"paused\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_account\",\"type\":\"address\"}],\"name\":\"removeProver\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_account\",\"type\":\"address\"}],\"name\":\"removeSequencer\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"renounceOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes\",\"name\":\"_batchHeader\",\"type\":\"bytes\"},{\"internalType\":\"uint256\",\"name\":\"_count\",\"type\":\"uint256\"}],\"name\":\"revertBatch\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bool\",\"name\":\"_status\",\"type\":\"bool\"}],\"name\":\"setPause\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"newOwner\",\"type\":\"address\"}],\"name\":\"transferOwnership\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"_maxNumTxInChunk\",\"type\":\"uint256\"}],\"name\":\"updateMaxNumTxInChunk\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"_newVerifier\",\"type\":\"address\"}],\"name\":\"updateVerifier\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"verifier\",\"outputs\":[{\"internalType\":\"address\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"withdrawRoots\",\"outputs\":[{\"internalType\":\"bytes32\",\"name\":\"\",\"type\":\"bytes32\"}],\"stateMutability\":\"view\",\"type\":\"function\"}]", } // L1ScrollMessengerMetaData contains all meta data concerning the L1ScrollMessenger contract. 
From 72fe2d2b82bd64289f086a68ffe83a48b2022912 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Tue, 12 Dec 2023 11:11:15 +0200 Subject: [PATCH 35/59] test: fix test --- contracts/src/test/ScrollChain.t.sol | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/contracts/src/test/ScrollChain.t.sol b/contracts/src/test/ScrollChain.t.sol index 7fbddd0e4f..155badca59 100644 --- a/contracts/src/test/ScrollChain.t.sol +++ b/contracts/src/test/ScrollChain.t.sol @@ -53,7 +53,7 @@ contract ScrollChainTest is DSTestPlus { } function testCommitBatch() public { - bytes memory batchHeader0 = new bytes(89); + bytes memory batchHeader0 = new bytes(129); // import 10 L1 messages for (uint256 i = 0; i < 10; i++) { @@ -87,13 +87,13 @@ contract ScrollChainTest is DSTestPlus { // batch header length too small, revert hevm.startPrank(address(0)); hevm.expectRevert("batch header length too small"); - rollup.commitBatch(0, new bytes(88), new bytes[](1), new bytes(0), 0); + rollup.commitBatch(0, new bytes(128), new bytes[](1), new bytes(0), 0); hevm.stopPrank(); // wrong bitmap length, revert hevm.startPrank(address(0)); hevm.expectRevert("wrong bitmap length"); - rollup.commitBatch(0, new bytes(90), new bytes[](1), new bytes(0), 0); + rollup.commitBatch(0, new bytes(130), new bytes[](1), new bytes(0), 0); hevm.stopPrank(); // incorrect parent batch hash, revert @@ -129,7 +129,7 @@ contract ScrollChainTest is DSTestPlus { hevm.stopPrank(); // cannot skip last L1 message, revert - chunk0 = new bytes(1 + 60); + chunk0 = new bytes(1 + 108); bytes memory bitmap = new bytes(32); chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunk0[58] = bytes1(uint8(1)); // numTransactions = 1 @@ -142,7 +142,7 @@ contract ScrollChainTest is DSTestPlus { hevm.stopPrank(); // num txs less than num L1 msgs, revert - chunk0 = new bytes(1 + 60); + chunk0 = new bytes(1 + 108); bitmap = new bytes(32); chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunk0[58] = bytes1(uint8(1)); // numTransactions = 1 @@ -155,7 +155,7 @@ contract ScrollChainTest is DSTestPlus { hevm.stopPrank(); // incomplete l2 transaction data, revert - chunk0 = new bytes(1 + 60 + 1); + chunk0 = new bytes(1 + 108 + 1); chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); @@ -164,7 +164,7 @@ contract ScrollChainTest is DSTestPlus { hevm.stopPrank(); // commit batch with one chunk, no tx, correctly - chunk0 = new bytes(1 + 60); + chunk0 = new bytes(1 + 108); chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); @@ -539,12 +539,12 @@ contract ScrollChainTest is DSTestPlus { // caller not owner, revert hevm.startPrank(address(1)); hevm.expectRevert("Ownable: caller is not the owner"); - rollup.revertBatch(new bytes(89), 1); + rollup.revertBatch(new bytes(129), 1); hevm.stopPrank(); rollup.addSequencer(address(0)); - bytes memory batchHeader0 = new bytes(89); + bytes memory batchHeader0 = new bytes(129); // import genesis batch assembly { @@ -557,14 +557,14 @@ contract ScrollChainTest is DSTestPlus { bytes memory chunk0; // commit one batch - chunk0 = new bytes(1 + 60); + chunk0 = new bytes(1 + 108); chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 0); hevm.stopPrank(); - bytes memory batchHeader1 = new bytes(89); + bytes memory batchHeader1 = new bytes(129); assembly { mstore(add(batchHeader1, 0x20), 
0) // version mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex From 2678c844f42e76d3e3de20dd0cf1be0e93e6655b Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 13 Dec 2023 14:15:36 +0200 Subject: [PATCH 36/59] fix(types/chunk): encode --- common/types/chunk.go | 4 +++- common/types/chunk_test.go | 12 ++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/common/types/chunk.go b/common/types/chunk.go index 415d793939..c495130698 100644 --- a/common/types/chunk.go +++ b/common/types/chunk.go @@ -79,7 +79,9 @@ func (c *Chunk) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) { chunkBytes = append(chunkBytes, l2TxDataBytes...) - binary.BigEndian.PutUint64(chunkBytes, c.LastAppliedL1Block) + var lastAppliedL1BlockBytes [8]byte + binary.BigEndian.PutUint64(lastAppliedL1BlockBytes[:], c.LastAppliedL1Block) + chunkBytes = append(chunkBytes, lastAppliedL1BlockBytes[:]...) chunkBytes = append(chunkBytes, c.L1BlockRangeHash.Bytes()...) return chunkBytes, nil diff --git a/common/types/chunk_test.go b/common/types/chunk_test.go index 2fc8f5f7d4..8a0a54fcd4 100644 --- a/common/types/chunk_test.go +++ b/common/types/chunk_test.go @@ -50,8 +50,8 @@ func TestChunkEncode(t *testing.T) { bytes, err = chunk.Encode(0) hexString := hex.EncodeToString(bytes) assert.NoError(t, err) - assert.Equal(t, 339, len(bytes)) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000000000000355418d1e818400020000000000000000000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10000000000000000000000000000000000000000000000000000000000000000", hexString) + assert.Equal(t, 347, len(bytes)) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000000000000355418d1e818400020000000000000000000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f100000000000000000000000000000000000000000000000000000000000000000000000000000000", hexString) // Test case 4: when the chunk contains one block with 1 L1MsgTx templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json") @@ -72,8 +72,8 @@ func TestChunkEncode(t *testing.T) { bytes, err = chunk.Encode(0) hexString = hex.EncodeToString(bytes) assert.NoError(t, err) - assert.Equal(t, 137, len(bytes)) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b000000000000000000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080800000000000000000000000000000000000000000000000000000000000000000", hexString) + assert.Equal(t, 145, len(bytes)) + assert.Equal(t, 
"01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b000000000000000000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808000000000000000000000000000000000000000000000000000000000000000000000000000000000", hexString) // Test case 5: when the chunk contains two blocks each with 1 L1MsgTx // TODO: revise this test, we cannot reuse the same L1MsgTx twice @@ -88,8 +88,8 @@ func TestChunkEncode(t *testing.T) { bytes, err = chunk.Encode(0) hexString = hex.EncodeToString(bytes) assert.NoError(t, err) - assert.Equal(t, 241, len(bytes)) - assert.Equal(t, "02000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b0000000000000000000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a120000010000000000000000000000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080800000000000000000000000000000000000000000000000000000000000000000", hexString) + assert.Equal(t, 249, len(bytes)) + assert.Equal(t, "02000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b0000000000000000000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a120000010000000000000000000000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808000000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808000000000000000000000000000000000000000000000000000000000000000000000000000000000", hexString) } func TestChunkHash(t *testing.T) { From e9a7c861d3d74b3398f6e7d3451a58ca2ee63240 Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 13 Dec 2023 14:16:30 +0200 Subject: [PATCH 37/59] fix(types/chunk): exclude l1 block hashes --- common/types/chunk.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/common/types/chunk.go b/common/types/chunk.go index c495130698..1f0c501c9f 100644 --- a/common/types/chunk.go +++ b/common/types/chunk.go @@ -63,7 +63,8 @@ func (c *Chunk) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) { // Append rlp-encoded l2Txs for _, txData := range block.Transactions { - if txData.Type == types.L1MessageTxType { + // TODO(l1blockhashes): Check if necessary + if txData.Type == types.L1MessageTxType || txData.Type == types.L1BlockHashesTxType { continue } rlpTxData, err := convertTxDataToRLPEncoding(txData) @@ -112,7 +113,8 @@ func (c *Chunk) Hash(totalL1MessagePoppedBefore uint64) (common.Hash, error) { if err != nil { return common.Hash{}, err } - if txData.Type == types.L1MessageTxType { + // TODO(l1blockhashes): Check if necessary + if txData.Type == types.L1MessageTxType || txData.Type == types.L1BlockHashesTxType { l1TxHashes = append(l1TxHashes, hashBytes...) } else { l2TxHashes = append(l2TxHashes, hashBytes...) 
From d2eac6e833872920fa475c0f76b1bc6f5468db9a Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 13 Dec 2023 14:56:19 +0200 Subject: [PATCH 38/59] fix(types/batch): encode/decode --- common/types/batch_header.go | 3 +-- common/types/batch_header_test.go | 16 +++++++++------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/common/types/batch_header.go b/common/types/batch_header.go index dc5a895c1d..75baff7bf1 100644 --- a/common/types/batch_header.go +++ b/common/types/batch_header.go @@ -160,7 +160,6 @@ func (b *BatchHeader) Encode() []byte { binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) copy(batchBytes[25:], b.dataHash[:]) copy(batchBytes[57:], b.parentBatchHash[:]) - copy(batchBytes[57:], b.parentBatchHash[:]) copy(batchBytes[89:], b.skippedL1MessageBitmap[:]) binary.BigEndian.PutUint64(batchBytes[89+len(b.skippedL1MessageBitmap):], b.lastAppliedL1Block) copy(batchBytes[97+len(b.skippedL1MessageBitmap):], b.l1BlockRangeHash[:]) @@ -174,7 +173,7 @@ func (b *BatchHeader) Hash() common.Hash { // DecodeBatchHeader attempts to decode the given byte slice into a BatchHeader. func DecodeBatchHeader(data []byte) (*BatchHeader, error) { - if len(data) < 97 { + if len(data) < 129 { return nil, fmt.Errorf("insufficient data for BatchHeader") } b := &BatchHeader{ diff --git a/common/types/batch_header_test.go b/common/types/batch_header_test.go index c0de7575b1..da3b0d5a92 100644 --- a/common/types/batch_header_test.go +++ b/common/types/batch_header_test.go @@ -150,8 +150,8 @@ func TestBatchHeaderEncode(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, batchHeader) bytes := batchHeader.Encode() - assert.Equal(t, 89, len(bytes)) - assert.Equal(t, "0100000000000000010000000000000000000000000000000010a64c9bd905f8caf5d668fbda622d6558c5a42cdb4b3895709743d159c22e534136709aabc8a23aa17fbcc833da2f7857d3c2884feec9aae73429c135f94985", common.Bytes2Hex(bytes)) + assert.Equal(t, 129, len(bytes)) + assert.Equal(t, "0100000000000000010000000000000000000000000000000010a64c9bd905f8caf5d668fbda622d6558c5a42cdb4b3895709743d159c22e537afdc2ea6f8daaa4b430ce1424f59bcec401d00e34a99b1da457babc405a86070000000000000000290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563", common.Bytes2Hex(bytes)) // With L1 Msg templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json") @@ -168,8 +168,8 @@ func TestBatchHeaderEncode(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, batchHeader) bytes = batchHeader.Encode() - assert.Equal(t, 121, len(bytes)) - assert.Equal(t, "010000000000000001000000000000000b000000000000000b34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1ca4136709aabc8a23aa17fbcc833da2f7857d3c2884feec9aae73429c135f9498500000000000000000000000000000000000000000000000000000000000003ff", common.Bytes2Hex(bytes)) + assert.Equal(t, 161, len(bytes)) + assert.Equal(t, "010000000000000001000000000000000b000000000000000b34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1ca7afdc2ea6f8daaa4b430ce1424f59bcec401d00e34a99b1da457babc405a860700000000000000000000000000000000000000000000000000000000000003ff0000000000000000290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563", common.Bytes2Hex(bytes)) } func TestBatchHeaderHash(t *testing.T) { @@ -197,7 +197,7 @@ func TestBatchHeaderHash(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, batchHeader) hash := batchHeader.Hash() - assert.Equal(t, "d69da4357da0073f4093c76e49f077e21bb52f48f57ee3e1fbd9c38a2881af81", common.Bytes2Hex(hash.Bytes())) + assert.Equal(t, 
"e5131040ff2c5c0dafc629651452e3c6d84e2a5512e883cc94a3ca1677fb5d5e", common.Bytes2Hex(hash.Bytes())) templateBlockTrace, err = os.ReadFile("../testdata/blockTrace_03.json") assert.NoError(t, err) @@ -213,7 +213,7 @@ func TestBatchHeaderHash(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, batchHeader2) hash2 := batchHeader2.Hash() - assert.Equal(t, "34de600163aa745d4513113137a5b54960d13f0d3f2849e490c4b875028bf930", common.Bytes2Hex(hash2.Bytes())) + assert.Equal(t, "922db89ce8a8e3e202d43ca70e59b9277c1f0d90c72daed7270896f410abb3ac", common.Bytes2Hex(hash2.Bytes())) // With L1 Msg templateBlockTrace3, err := os.ReadFile("../testdata/blockTrace_04.json") @@ -230,7 +230,7 @@ func TestBatchHeaderHash(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, batchHeader) hash = batchHeader.Hash() - assert.Equal(t, "1c3007880f0eafe74572ede7d164ff1ee5376e9ac9bff6f7fb837b2630cddc9a", common.Bytes2Hex(hash.Bytes())) + assert.Equal(t, "438ed7f9d8d5a312b5eab7527789c7c1fbb26c9b2700e5f4ce0facd7824bd5ba", common.Bytes2Hex(hash.Bytes())) } func TestBatchHeaderDecode(t *testing.T) { @@ -242,6 +242,8 @@ func TestBatchHeaderDecode(t *testing.T) { dataHash: common.HexToHash("0x01"), parentBatchHash: common.HexToHash("0x02"), skippedL1MessageBitmap: []byte{0x01, 0x02, 0x03}, + lastAppliedL1Block: 5, + l1BlockRangeHash: common.HexToHash("438ed7f9d8d5a312b5eab7527789c7c1fbb26c9b2700e5f4ce0facd7824bd5ba"), } encoded := header.Encode() From 3dd1a5d9c61c681c76188d10900720435bb28c7a Mon Sep 17 00:00:00 2001 From: failfmi Date: Tue, 19 Dec 2023 10:07:45 +0200 Subject: [PATCH 39/59] fix(contracts/ScrollChain): batch and chunk ptr loadings --- contracts/src/L1/rollup/ScrollChain.sol | 29 +++++++++---------- .../libraries/codec/BatchHeaderV0Codec.sol | 5 ++-- contracts/src/libraries/codec/ChunkCodec.sol | 6 ++-- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/contracts/src/L1/rollup/ScrollChain.sol b/contracts/src/L1/rollup/ScrollChain.sol index 803cf992ad..0ebb95fe17 100644 --- a/contracts/src/L1/rollup/ScrollChain.sol +++ b/contracts/src/L1/rollup/ScrollChain.sol @@ -483,16 +483,14 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { _skippedL1MessageBitmap ); - if (_prevLastAppliedL1Block != 0) { - bytes32 _l1BlockRangeHash = IL1ViewOracle(l1ViewOracle).blockRangeHash( - _prevLastAppliedL1Block + 1, - chunkResult._lastAppliedL1BlockInChunk - ); - - require(_l1BlockRangeHash == chunkResult._l1BlockRangeHashInChunk, "incorrect l1 block range hash"); - _l1BlockRangeHashes[i] = chunkResult._l1BlockRangeHashInChunk; - _prevLastAppliedL1Block = chunkResult._lastAppliedL1BlockInChunk; - } + bytes32 _l1BlockRangeHash = IL1ViewOracle(l1ViewOracle).blockRangeHash( + _prevLastAppliedL1Block + 1, + chunkResult._lastAppliedL1BlockInChunk + ); + + require(_l1BlockRangeHash == chunkResult._l1BlockRangeHashInChunk, "incorrect l1 block range hash"); + _l1BlockRangeHashes[i] = chunkResult._l1BlockRangeHashInChunk; + _prevLastAppliedL1Block = chunkResult._lastAppliedL1BlockInChunk; // if it is the last chunk, update the last applied L1 block if (i == _chunksLength - 1) { @@ -628,12 +626,12 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { // stack too deep { uint64 lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInChunk(l2TxPtr); - _chunkResult._l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); - require( lastAppliedL1BlockInChunk == _chunkResult._lastAppliedL1BlockInChunk, "incorrect lastAppliedL1Block in chunk" ); + + 
_chunkResult._l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); } // check the actual number of transactions in the chunk @@ -645,12 +643,13 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { // stack too deep { - uint256 _lastAppliedL1BlockInChunk = _chunkResult._lastAppliedL1BlockInChunk; + uint64 _lastAppliedL1BlockInChunk = _chunkResult._lastAppliedL1BlockInChunk; bytes32 _l1BlockRangeHashInChunk = _chunkResult._l1BlockRangeHashInChunk; assembly { - mstore(dataPtr, _lastAppliedL1BlockInChunk) + mstore(dataPtr, shl(192, _lastAppliedL1BlockInChunk)) + dataPtr := add(dataPtr, 0x8) mstore(dataPtr, _l1BlockRangeHashInChunk) - dataPtr := add(dataPtr, 0x28) + dataPtr := add(dataPtr, 0x20) } } diff --git a/contracts/src/libraries/codec/BatchHeaderV0Codec.sol b/contracts/src/libraries/codec/BatchHeaderV0Codec.sol index a867ef710c..86978f73ba 100644 --- a/contracts/src/libraries/codec/BatchHeaderV0Codec.sol +++ b/contracts/src/libraries/codec/BatchHeaderV0Codec.sol @@ -215,7 +215,8 @@ library BatchHeaderV0Codec { uint256 _lastAppliedL1Block ) internal pure { assembly { - mstore(add(batchPtr, _skippedL1MessageBitmapLength), shl(224, _lastAppliedL1Block)) + batchPtr := add(batchPtr, 89) + mstore(add(batchPtr, _skippedL1MessageBitmapLength), shl(192, _lastAppliedL1Block)) } } @@ -229,7 +230,7 @@ library BatchHeaderV0Codec { bytes32 _l1BlockRangeHash ) internal pure { assembly { - batchPtr := add(batchPtr, 8) + batchPtr := add(batchPtr, 97) mstore(add(batchPtr, _skippedL1MessageBitmapLength), _l1BlockRangeHash) } } diff --git a/contracts/src/libraries/codec/ChunkCodec.sol b/contracts/src/libraries/codec/ChunkCodec.sol index 0688ff642c..f190c6fc46 100644 --- a/contracts/src/libraries/codec/ChunkCodec.sol +++ b/contracts/src/libraries/codec/ChunkCodec.sol @@ -69,7 +69,7 @@ library ChunkCodec { /// @return _lastAppliedL1Block The number of last applied L1 block. function lastAppliedL1BlockInChunk(uint256 l2TxEndPtr) internal pure returns (uint64 _lastAppliedL1Block) { assembly { - _lastAppliedL1Block := shr(248, mload(l2TxEndPtr)) + _lastAppliedL1Block := shr(192, mload(l2TxEndPtr)) } } @@ -78,7 +78,7 @@ library ChunkCodec { /// @return _l1BlockRangeHash The hash of the L1 block range. function l1BlockRangeHashInChunk(uint256 l2TxEndPtr) internal pure returns (bytes32 _l1BlockRangeHash) { assembly { - _l1BlockRangeHash := shr(224, mload(add(l2TxEndPtr, 8))) + _l1BlockRangeHash := mload(add(l2TxEndPtr, 8)) } } @@ -112,7 +112,7 @@ library ChunkCodec { /// @return _lastAppliedL1Block The number of last applied L1 block. function lastAppliedL1BlockInBlock(uint256 blockPtr) internal pure returns (uint64 _lastAppliedL1Block) { assembly { - _lastAppliedL1Block := shr(240, mload(add(blockPtr, 60))) + _lastAppliedL1Block := shr(192, mload(add(blockPtr, 60))) } } From f951b9b26741318a74ce4db035e689261b517220 Mon Sep 17 00:00:00 2001 From: failfmi Date: Tue, 19 Dec 2023 10:09:50 +0200 Subject: [PATCH 40/59] fix(types/chunk): missing l1 block hashes info in Hash --- common/types/chunk.go | 5 +++++ common/types/chunk_test.go | 6 +++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/common/types/chunk.go b/common/types/chunk.go index 1f0c501c9f..e5b31035f9 100644 --- a/common/types/chunk.go +++ b/common/types/chunk.go @@ -124,6 +124,11 @@ func (c *Chunk) Hash(totalL1MessagePoppedBefore uint64) (common.Hash, error) { dataBytes = append(dataBytes, l2TxHashes...) 
} + var lastAppliedL1BlockBytes [8]byte + binary.BigEndian.PutUint64(lastAppliedL1BlockBytes[:], c.LastAppliedL1Block) + dataBytes = append(dataBytes, lastAppliedL1BlockBytes[:]...) + dataBytes = append(dataBytes, c.L1BlockRangeHash.Bytes()...) + hash := crypto.Keccak256Hash(dataBytes) return hash, nil } diff --git a/common/types/chunk_test.go b/common/types/chunk_test.go index 8a0a54fcd4..7361830422 100644 --- a/common/types/chunk_test.go +++ b/common/types/chunk_test.go @@ -114,7 +114,7 @@ func TestChunkHash(t *testing.T) { } hash, err = chunk.Hash(0) assert.NoError(t, err) - assert.Equal(t, "0x78c839dfc494396c16b40946f32b3f4c3e8c2d4bfd04aefcf235edec474482f8", hash.Hex()) + assert.Equal(t, "0x215cea67726a5655e1689ea984b88660e6be5ab700cb0f15d2ac769339879c6b", hash.Hex()) // Test case 3: successfully hashing a chunk on two blocks templateBlockTrace1, err := os.ReadFile("../testdata/blockTrace_03.json") @@ -129,7 +129,7 @@ func TestChunkHash(t *testing.T) { } hash, err = chunk.Hash(0) assert.NoError(t, err) - assert.Equal(t, "0x8d71fbbc486f745ff46ca5d1c0f18ab1f1a1b488e88708034b57d6a1d7fb04ed", hash.Hex()) + assert.Equal(t, "0xaca17d4cae322f15537a4b770332171478991fd8b6a158327205ae4596cc838c", hash.Hex()) // Test case 4: successfully hashing a chunk on two blocks each with L1 and L2 txs templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json") @@ -144,7 +144,7 @@ func TestChunkHash(t *testing.T) { } hash, err = chunk.Hash(0) assert.NoError(t, err) - assert.Equal(t, "0x6a47de75ba15fdefa5c8f63a43715f633a0f9559cf07e8bd164ac0cae80300cb", hash.Hex()) + assert.Equal(t, "0x811b8fd798aacaaeb9857257d5053aabdc65293f301e58916c24b85b11b9db95", hash.Hex()) } func TestErrorPaths(t *testing.T) { From ab0cba57567ea87b25c7dd08d45ccc5bcd580616 Mon Sep 17 00:00:00 2001 From: failfmi Date: Tue, 19 Dec 2023 17:47:30 +0200 Subject: [PATCH 41/59] fix(contracts/ScrollChain): handle chunks, which have the same lastAppliedL1BlockNumber as previous --- contracts/src/L1/rollup/ScrollChain.sol | 48 ++++---- contracts/src/test/ScrollChain.t.sol | 154 ++++++++++++++++-------- 2 files changed, 129 insertions(+), 73 deletions(-) diff --git a/contracts/src/L1/rollup/ScrollChain.sol b/contracts/src/L1/rollup/ScrollChain.sol index 0ebb95fe17..d012f83c2b 100644 --- a/contracts/src/L1/rollup/ScrollChain.sol +++ b/contracts/src/L1/rollup/ScrollChain.sol @@ -94,12 +94,12 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { // stack too deep struct ChunkResult { - // _totalNumL1MessagesInChunk The total number of L1 messages popped in current chunk - uint256 _totalNumL1MessagesInChunk; - // _lastAppliedL1BlockInChunk The last applied L1 Block Number in current chunk - uint64 _lastAppliedL1BlockInChunk; - // _l1BlockRangeHashInChunk The keccak256 of all the l1 block range hashes in current chunk - bytes32 _l1BlockRangeHashInChunk; + // totalNumL1MessagesInChunk The total number of L1 messages popped in current chunk + uint256 totalNumL1MessagesInChunk; + // lastAppliedL1BlockInChunk The last applied L1 Block Number in current chunk + uint64 lastAppliedL1BlockInChunk; + // l1BlockRangeHashInChunk The keccak256 of all the l1 block range hashes in current chunk + bytes32 l1BlockRangeHashInChunk; } /********************** @@ -483,23 +483,27 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { _skippedL1MessageBitmap ); - bytes32 _l1BlockRangeHash = IL1ViewOracle(l1ViewOracle).blockRangeHash( - _prevLastAppliedL1Block + 1, - chunkResult._lastAppliedL1BlockInChunk 
- ); + // TODO(l1blockhashes): revise + bytes32 _l1BlockRangeHash = 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470; // keccak256("") + if (_prevLastAppliedL1Block != chunkResult.lastAppliedL1BlockInChunk) { + _l1BlockRangeHash = IL1ViewOracle(l1ViewOracle).blockRangeHash( + _prevLastAppliedL1Block + 1, + chunkResult.lastAppliedL1BlockInChunk + ); + } - require(_l1BlockRangeHash == chunkResult._l1BlockRangeHashInChunk, "incorrect l1 block range hash"); - _l1BlockRangeHashes[i] = chunkResult._l1BlockRangeHashInChunk; - _prevLastAppliedL1Block = chunkResult._lastAppliedL1BlockInChunk; + require(_l1BlockRangeHash == chunkResult.l1BlockRangeHashInChunk, "incorrect l1 block range hash"); + _l1BlockRangeHashes[i] = chunkResult.l1BlockRangeHashInChunk; + _prevLastAppliedL1Block = chunkResult.lastAppliedL1BlockInChunk; // if it is the last chunk, update the last applied L1 block if (i == _chunksLength - 1) { - _lastAppliedL1Block = chunkResult._lastAppliedL1BlockInChunk; + _lastAppliedL1Block = chunkResult.lastAppliedL1BlockInChunk; } unchecked { - _totalL1MessagesPoppedInBatch += chunkResult._totalNumL1MessagesInChunk; - _totalL1MessagesPoppedOverall += chunkResult._totalNumL1MessagesInChunk; + _totalL1MessagesPoppedInBatch += chunkResult.totalNumL1MessagesInChunk; + _totalL1MessagesPoppedOverall += chunkResult.totalNumL1MessagesInChunk; dataPtr += 32; } } @@ -610,11 +614,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { if (_numBlocks == 1) { // check last block - _chunkResult._lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInBlock(blockPtr); + _chunkResult.lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInBlock(blockPtr); } unchecked { - _chunkResult._totalNumL1MessagesInChunk += _numL1MessagesInBlock; + _chunkResult.totalNumL1MessagesInChunk += _numL1MessagesInBlock; _totalL1MessagesPoppedInBatch += _numL1MessagesInBlock; _totalL1MessagesPoppedOverall += _numL1MessagesInBlock; @@ -627,11 +631,11 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { { uint64 lastAppliedL1BlockInChunk = ChunkCodec.lastAppliedL1BlockInChunk(l2TxPtr); require( - lastAppliedL1BlockInChunk == _chunkResult._lastAppliedL1BlockInChunk, + lastAppliedL1BlockInChunk == _chunkResult.lastAppliedL1BlockInChunk, "incorrect lastAppliedL1Block in chunk" ); - _chunkResult._l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); + _chunkResult.l1BlockRangeHashInChunk = ChunkCodec.l1BlockRangeHashInChunk(l2TxPtr); } // check the actual number of transactions in the chunk @@ -643,8 +647,8 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain { // stack too deep { - uint64 _lastAppliedL1BlockInChunk = _chunkResult._lastAppliedL1BlockInChunk; - bytes32 _l1BlockRangeHashInChunk = _chunkResult._l1BlockRangeHashInChunk; + uint64 _lastAppliedL1BlockInChunk = _chunkResult.lastAppliedL1BlockInChunk; + bytes32 _l1BlockRangeHashInChunk = _chunkResult.l1BlockRangeHashInChunk; assembly { mstore(dataPtr, shl(192, _lastAppliedL1BlockInChunk)) dataPtr := add(dataPtr, 0x8) diff --git a/contracts/src/test/ScrollChain.t.sol b/contracts/src/test/ScrollChain.t.sol index 155badca59..0bf9aa85af 100644 --- a/contracts/src/test/ScrollChain.t.sol +++ b/contracts/src/test/ScrollChain.t.sol @@ -165,6 +165,9 @@ contract ScrollChainTest is DSTestPlus { // commit batch with one chunk, no tx, correctly chunk0 = new bytes(1 + 108); + assembly { + mstore(add(chunk0, add(0x20, 77)), 
0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470) + } chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); @@ -187,7 +190,7 @@ contract ScrollChainTest is DSTestPlus { rollup.addProver(address(0)); rollup.addSequencer(address(0)); - bytes memory batchHeader0 = new bytes(89); + bytes memory batchHeader0 = new bytes(129); // import genesis batch assembly { @@ -200,7 +203,10 @@ contract ScrollChainTest is DSTestPlus { bytes memory chunk0; // commit one batch - chunk0 = new bytes(1 + 60); + chunk0 = new bytes(1 + 108); + assembly { + mstore(add(chunk0, add(0x20, 77)), 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470) // l1BlockRangeHash keccak256("") + } chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); @@ -208,14 +214,16 @@ contract ScrollChainTest is DSTestPlus { hevm.stopPrank(); assertGt(uint256(rollup.committedBatches(1)), 0); - bytes memory batchHeader1 = new bytes(89); + bytes memory batchHeader1 = new bytes(129); assembly { mstore(add(batchHeader1, 0x20), 0) // version mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex mstore(add(batchHeader1, add(0x20, 9)), 0) // l1MessagePopped mstore(add(batchHeader1, add(0x20, 17)), 0) // totalL1MessagePopped - mstore(add(batchHeader1, add(0x20, 25)), 0x246394445f4fe64ed5598554d55d1682d6fb3fe04bf58eb54ef81d1189fafb51) // dataHash + mstore(add(batchHeader1, add(0x20, 25)), 0x17181f6abf48415097856d36591857cb134a3fe04b3aaf5e4ee8e82ec478f9cf) // dataHash mstore(add(batchHeader1, add(0x20, 57)), batchHash0) // parentBatchHash + mstore(add(batchHeader1, add(0x20, 89)), 0) // lastAppliedL1Block + mstore(add(batchHeader1, add(0x20, 97)), 0x10ca3eff73ebec87d2394fc58560afeab86dac7a21f5e402ea0a55e5c8a6758f) // blockRangeHash } // incorrect batch hash, revert @@ -230,7 +238,7 @@ contract ScrollChainTest is DSTestPlus { hevm.startPrank(address(0)); hevm.expectRevert("batch header length too small"); rollup.finalizeBatchWithProof( - new bytes(88), + new bytes(128), bytes32(uint256(1)), bytes32(uint256(2)), bytes32(0), @@ -242,7 +250,7 @@ contract ScrollChainTest is DSTestPlus { hevm.startPrank(address(0)); hevm.expectRevert("wrong bitmap length"); rollup.finalizeBatchWithProof( - new bytes(90), + new bytes(130), bytes32(uint256(1)), bytes32(uint256(2)), bytes32(0), @@ -289,13 +297,15 @@ contract ScrollChainTest is DSTestPlus { rollup.addSequencer(address(0)); rollup.addProver(address(0)); + hevm.roll(2); + // import 300 L1 messages for (uint256 i = 0; i < 300; i++) { messageQueue.appendCrossDomainMessage(address(this), 1000000, new bytes(0)); } // import genesis batch first - bytes memory batchHeader0 = new bytes(89); + bytes memory batchHeader0 = new bytes(129); assembly { mstore(add(batchHeader0, add(0x20, 25)), 1) } @@ -309,53 +319,67 @@ contract ScrollChainTest is DSTestPlus { // commit batch1, one chunk with one block, 1 tx, 1 L1 message, no skip // => payload for data hash of chunk0 - // 0000000000000000 - // 0000000000000000 - // 0000000000000000000000000000000000000000000000000000000000000000 - // 0000000000000000 - // 0001 - // a2277fd30bbbe74323309023b56035b376d7768ad237ae4fc46ead7dc9591ae1 + // 0000000000000000 - blockContext 0 - blockNumber + // 0000000000000000 - blockContext 0 - timestamp + // 0000000000000000000000000000000000000000000000000000000000000000 - blockContext 0 - baseFee + // 0000000000000000 - blockContext 0 - gasLimit + // 0001 - blockContext 0 - numTransactions + // 0001 - 
blockContext 0 - numL1Messages + // 0000000000000001 - blockContext 0 - lastAppliedL1Block + // a2277fd30bbbe74323309023b56035b376d7768ad237ae4fc46ead7dc9591ae1 - L1 Message Tx Hash + // 0000000000000001 - chunk 0 - lastAppliedL1Block + // b10e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf6 - chunk 0 - l1BlockRangeHash // => data hash for chunk0 - // 9ef1e5694bdb014a1eea42be756a8f63bfd8781d6332e9ef3b5126d90c62f110 + // 7cf07190f6882a8027e86d92f4b37e53f1c22867c58aa7db008d80a864aa7908 // => data hash for all chunks - // d9cb6bf9264006fcea490d5c261f7453ab95b1b26033a3805996791b8e3a62f3 + // 86fcdd7b593809d108dae8a3a696e5ae4af774943f15cc2fd3c39cd02dabd0d7 // => payload for batch header // 00 // 0000000000000001 // 0000000000000001 // 0000000000000001 - // d9cb6bf9264006fcea490d5c261f7453ab95b1b26033a3805996791b8e3a62f3 - // 119b828c2a2798d2c957228ebeaff7e10bb099ae0d4e224f3eeb779ff61cba61 + // 86fcdd7b593809d108dae8a3a696e5ae4af774943f15cc2fd3c39cd02dabd0d7 + // 743dab51a4c73747185caad9effa81411a067f3d7aa69d69d4b7f3e9802a71c4 // 0000000000000000000000000000000000000000000000000000000000000000 + // 0000000000000001 + // b10e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf6 // => hash for batch header - // 00847173b29b238cf319cde79512b7c213e5a8b4138daa7051914c4592b6dfc7 - bytes memory batchHeader1 = new bytes(89 + 32); + // b6448e0bbd8226646f2099cd47160478768e5ccc32b486189073dfcedbad34e3 + bytes memory batchHeader1 = new bytes(129 + 32); assembly { mstore(add(batchHeader1, 0x20), 0) // version mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex = 1 mstore(add(batchHeader1, add(0x20, 9)), shl(192, 1)) // l1MessagePopped = 1 mstore(add(batchHeader1, add(0x20, 17)), shl(192, 1)) // totalL1MessagePopped = 1 - mstore(add(batchHeader1, add(0x20, 25)), 0xd9cb6bf9264006fcea490d5c261f7453ab95b1b26033a3805996791b8e3a62f3) // dataHash + mstore(add(batchHeader1, add(0x20, 25)), 0x86fcdd7b593809d108dae8a3a696e5ae4af774943f15cc2fd3c39cd02dabd0d7) // dataHash mstore(add(batchHeader1, add(0x20, 57)), batchHash0) // parentBatchHash mstore(add(batchHeader1, add(0x20, 89)), 0) // bitmap0 + mstore(add(batchHeader1, add(0x20, 121)), shl(192, 1)) // lastAppliedL1Block + mstore( + add(batchHeader1, add(0x20, 129)), + 0xb5d9d894133a730aa651ef62d26b0ffa846233c74177a591a4a896adfda97d22 + ) // blockRangeHash } - chunk0 = new bytes(1 + 60); + chunk0 = new bytes(1 + 108); assembly { mstore(add(chunk0, 0x20), shl(248, 1)) // numBlocks = 1 - mstore(add(chunk0, add(0x21, 56)), shl(240, 1)) // numTransactions = 1 - mstore(add(chunk0, add(0x21, 58)), shl(240, 1)) // numL1Messages = 1 + mstore(add(chunk0, add(0x21, 56)), shl(240, 1)) // numTransactions = 1, block 0 + mstore(add(chunk0, add(0x21, 58)), shl(240, 1)) // numL1Messages = 1, block 0 + mstore(add(chunk0, add(0x21, 60)), shl(192, 1)) // lastAppliedL1Block = 1, block 0 + mstore(add(chunk0, add(0x20, 69)), shl(192, 1)) // lastAppliedL1Block = 1, chunk 0 + mstore(add(chunk0, add(0x20, 77)), 0xb10e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf6) // blockRangeHash } chunks = new bytes[](1); chunks[0] = chunk0; bitmap = new bytes(32); hevm.startPrank(address(0)); hevm.expectEmit(true, true, false, true); - emit CommitBatch(1, bytes32(0x00847173b29b238cf319cde79512b7c213e5a8b4138daa7051914c4592b6dfc7)); + emit CommitBatch(1, bytes32(0xb6448e0bbd8226646f2099cd47160478768e5ccc32b486189073dfcedbad34e3)); rollup.commitBatch(0, batchHeader0, chunks, bitmap, 0); hevm.stopPrank(); assertBoolEq(rollup.isBatchFinalized(1), false); 
bytes32 batchHash1 = rollup.committedBatches(1); - assertEq(batchHash1, bytes32(0x00847173b29b238cf319cde79512b7c213e5a8b4138daa7051914c4592b6dfc7)); + assertEq(batchHash1, bytes32(0xb6448e0bbd8226646f2099cd47160478768e5ccc32b486189073dfcedbad34e3)); // finalize batch1 hevm.startPrank(address(0)); @@ -384,7 +408,11 @@ contract ScrollChainTest is DSTestPlus { // 0000000000000000000000000000000000000000000000000000000000000000 // 0000000000000000 // 0003 + // 0000 + // 0000000000000001 - blockContext 0 - lastAppliedL1Block // ... (some tx hashes) + // 0000000000000001 - chunk 0 - lastAppliedL1Block + // b10e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf6 - chunk 0 - l1BlockRangeHash // => data hash for chunk0 // 2ac1dad3f3696e5581dfc10f2c7a7a8fc5b344285f7d332c7895a8825fca609a // 2. chunk1 has three blocks @@ -411,57 +439,76 @@ contract ScrollChainTest is DSTestPlus { // => data hash for chunk2 // e1276f58354ab2372050bde30d8c970ccc3728c76e97f37deebeee83ecbf5705 // => data hash for all chunks - // 3c71d155351642d15f1542a1543ce423abeca1f8939100a0a34cdc3127b95f69 + // 3de87c00834353063966bbb378e76de5956c1d15e8c218f907196f71426ebdec // => payload for batch header // 00 // 0000000000000002 // 0000000000000108 // 0000000000000109 - // 3c71d155351642d15f1542a1543ce423abeca1f8939100a0a34cdc3127b95f69 + // 3de87c00834353063966bbb378e76de5956c1d15e8c218f907196f71426ebdec // cef70bf80683c4d9b8b2813e90c314e8c56648e231300b8cfed9d666b0caf14e // aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa800000000000000000000000000000000000000000000000000000000000000aa + // 0000000000000001 - lastAppliedL1Block + // b10e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf6 - l1BlockRangeHash // => hash for batch header - // 03a9cdcb9d582251acf60937db006ec99f3505fd4751b7c1f92c9a8ef413e873 - bytes memory batchHeader2 = new bytes(89 + 32 + 32); + // 86dd7e3d438fe6f08b4dc82dfb7fc4e025cd252d789d71a8a5865c5169e8d5df + bytes memory batchHeader2 = new bytes(129 + 32 + 32); assembly { mstore(add(batchHeader2, 0x20), 0) // version mstore(add(batchHeader2, add(0x20, 1)), shl(192, 2)) // batchIndex = 2 mstore(add(batchHeader2, add(0x20, 9)), shl(192, 264)) // l1MessagePopped = 264 mstore(add(batchHeader2, add(0x20, 17)), shl(192, 265)) // totalL1MessagePopped = 265 - mstore(add(batchHeader2, add(0x20, 25)), 0x3c71d155351642d15f1542a1543ce423abeca1f8939100a0a34cdc3127b95f69) // dataHash + mstore(add(batchHeader2, add(0x20, 25)), 0x3de87c00834353063966bbb378e76de5956c1d15e8c218f907196f71426ebdec) // dataHash mstore(add(batchHeader2, add(0x20, 57)), batchHash1) // parentBatchHash mstore( add(batchHeader2, add(0x20, 89)), 77194726158210796949047323339125271902179989777093709359638389338608753093160 ) // bitmap0 mstore(add(batchHeader2, add(0x20, 121)), 42) // bitmap1 + mstore(add(batchHeader2, add(0x20, 153)), shl(192, 1)) // lastAppliedL1Block = 1 + mstore( + add(batchHeader2, add(0x20, 161)), + 0xad51961b5d4726f7c7501e5a50c32465739873a32d54b6c4fbb4f01c7263e6c0 + ) // blockRangeHash } - chunk0 = new bytes(1 + 60 + 3 * 5); + chunk0 = new bytes(1 + 108 + 3 * 5); assembly { mstore(add(chunk0, 0x20), shl(248, 1)) // numBlocks = 1 mstore(add(chunk0, add(0x21, 56)), shl(240, 3)) // numTransactions = 3 mstore(add(chunk0, add(0x21, 58)), shl(240, 0)) // numL1Messages = 0 + mstore(add(chunk0, add(0x21, 60)), shl(192, 1)) // lastAppliedL1Block = 1, block 0 } for (uint256 i = 0; i < 3; i++) { assembly { - mstore(add(chunk0, add(93, mul(i, 5))), shl(224, 1)) // tx = "0x00" + mstore(add(chunk0, add(101, mul(i, 
5))), shl(224, 1)) // tx = "0x00" } } - chunk1 = new bytes(1 + 60 * 3 + 51 * 5); + assembly { + mstore(add(chunk0, 116), shl(192, 1)) // lastAppliedL1Block = 1, chunk 0 + mstore(add(chunk0, 124), 0xb10e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf6) // l1BlockRangeHash, chunk 0 + } + chunk1 = new bytes(1 + 68 * 3 + 51 * 5 + 40); assembly { mstore(add(chunk1, 0x20), shl(248, 3)) // numBlocks = 3 mstore(add(chunk1, add(33, 56)), shl(240, 5)) // block0.numTransactions = 5 mstore(add(chunk1, add(33, 58)), shl(240, 3)) // block0.numL1Messages = 3 - mstore(add(chunk1, add(93, 56)), shl(240, 10)) // block1.numTransactions = 10 - mstore(add(chunk1, add(93, 58)), shl(240, 5)) // block1.numL1Messages = 5 - mstore(add(chunk1, add(153, 56)), shl(240, 300)) // block1.numTransactions = 300 - mstore(add(chunk1, add(153, 58)), shl(240, 256)) // block1.numL1Messages = 256 + mstore(add(chunk1, add(33, 60)), shl(192, 1)) // lastAppliedL1Block = 1, block 0 + mstore(add(chunk1, add(101, 56)), shl(240, 10)) // block1.numTransactions = 10 + mstore(add(chunk1, add(101, 58)), shl(240, 5)) // block1.numL1Messages = 5 + mstore(add(chunk1, add(101, 60)), shl(192, 1)) // lastAppliedL1Block = 1, block 1 + mstore(add(chunk1, add(169, 56)), shl(240, 300)) // block1.numTransactions = 300 + mstore(add(chunk1, add(169, 58)), shl(240, 256)) // block1.numL1Messages = 256 + mstore(add(chunk1, add(169, 60)), shl(192, 1)) // lastAppliedL1Block = 1, block 2 } for (uint256 i = 0; i < 51; i++) { assembly { - mstore(add(chunk1, add(213, mul(i, 5))), shl(224, 1)) // tx = "0x00" + mstore(add(chunk1, add(237, mul(i, 5))), shl(224, 1)) // tx = "0x00" } } + assembly { + mstore(add(chunk1, 492), shl(192, 1)) // lastAppliedL1Block = 1, chunk 1 + mstore(add(chunk1, 500), 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470) // l1BlockRangeHash, chunk 1 + } chunks = new bytes[](2); chunks[0] = chunk0; chunks[1] = chunk1; @@ -489,12 +536,12 @@ contract ScrollChainTest is DSTestPlus { rollup.updateMaxNumTxInChunk(186); hevm.startPrank(address(0)); hevm.expectEmit(true, true, false, true); - emit CommitBatch(2, bytes32(0x03a9cdcb9d582251acf60937db006ec99f3505fd4751b7c1f92c9a8ef413e873)); + emit CommitBatch(2, bytes32(0x86dd7e3d438fe6f08b4dc82dfb7fc4e025cd252d789d71a8a5865c5169e8d5df)); rollup.commitBatch(0, batchHeader1, chunks, bitmap, 0); hevm.stopPrank(); assertBoolEq(rollup.isBatchFinalized(2), false); bytes32 batchHash2 = rollup.committedBatches(2); - assertEq(batchHash2, bytes32(0x03a9cdcb9d582251acf60937db006ec99f3505fd4751b7c1f92c9a8ef413e873)); + assertEq(batchHash2, bytes32(0x86dd7e3d438fe6f08b4dc82dfb7fc4e025cd252d789d71a8a5865c5169e8d5df)); // verify committed batch correctly hevm.startPrank(address(0)); @@ -558,6 +605,9 @@ contract ScrollChainTest is DSTestPlus { // commit one batch chunk0 = new bytes(1 + 108); + assembly { + mstore(add(chunk0, add(0x20, 77)), 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470) // l1BlockRangeHash keccak256("") + } chunk0[0] = bytes1(uint8(1)); // one block in this chunk chunks[0] = chunk0; hevm.startPrank(address(0)); @@ -570,8 +620,10 @@ contract ScrollChainTest is DSTestPlus { mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex mstore(add(batchHeader1, add(0x20, 9)), 0) // l1MessagePopped mstore(add(batchHeader1, add(0x20, 17)), 0) // totalL1MessagePopped - mstore(add(batchHeader1, add(0x20, 25)), 0x246394445f4fe64ed5598554d55d1682d6fb3fe04bf58eb54ef81d1189fafb51) // dataHash + mstore(add(batchHeader1, add(0x20, 25)), 
0x17181f6abf48415097856d36591857cb134a3fe04b3aaf5e4ee8e82ec478f9cf) // dataHash mstore(add(batchHeader1, add(0x20, 57)), batchHash0) // parentBatchHash + mstore(add(batchHeader1, add(0x20, 89)), 0) // lastAppliedL1Block + mstore(add(batchHeader1, add(0x20, 97)), 0x10ca3eff73ebec87d2394fc58560afeab86dac7a21f5e402ea0a55e5c8a6758f) // blockRangeHash } // commit another batch @@ -727,57 +779,57 @@ contract ScrollChainTest is DSTestPlus { bytes memory batchHeader; // zero state root, revert - batchHeader = new bytes(89); + batchHeader = new bytes(129); hevm.expectRevert("zero state root"); rollup.importGenesisBatch(batchHeader, bytes32(0)); // batch header length too small, revert - batchHeader = new bytes(88); + batchHeader = new bytes(128); hevm.expectRevert("batch header length too small"); rollup.importGenesisBatch(batchHeader, bytes32(uint256(1))); // wrong bitmap length, revert - batchHeader = new bytes(90); + batchHeader = new bytes(130); hevm.expectRevert("wrong bitmap length"); rollup.importGenesisBatch(batchHeader, bytes32(uint256(1))); // not all fields are zero, revert - batchHeader = new bytes(89); + batchHeader = new bytes(129); batchHeader[0] = bytes1(uint8(1)); // version not zero hevm.expectRevert("not all fields are zero"); rollup.importGenesisBatch(batchHeader, bytes32(uint256(1))); - batchHeader = new bytes(89); + batchHeader = new bytes(129); batchHeader[1] = bytes1(uint8(1)); // batchIndex not zero hevm.expectRevert("not all fields are zero"); rollup.importGenesisBatch(batchHeader, bytes32(uint256(1))); - batchHeader = new bytes(89 + 32); + batchHeader = new bytes(129 + 32); assembly { mstore(add(batchHeader, add(0x20, 9)), shl(192, 1)) // l1MessagePopped not zero } hevm.expectRevert("not all fields are zero"); rollup.importGenesisBatch(batchHeader, bytes32(uint256(1))); - batchHeader = new bytes(89); + batchHeader = new bytes(129); batchHeader[17] = bytes1(uint8(1)); // totalL1MessagePopped not zero hevm.expectRevert("not all fields are zero"); rollup.importGenesisBatch(batchHeader, bytes32(uint256(1))); // zero data hash, revert - batchHeader = new bytes(89); + batchHeader = new bytes(129); hevm.expectRevert("zero data hash"); rollup.importGenesisBatch(batchHeader, bytes32(uint256(1))); // nonzero parent batch hash, revert - batchHeader = new bytes(89); + batchHeader = new bytes(129); batchHeader[25] = bytes1(uint8(1)); // dataHash not zero batchHeader[57] = bytes1(uint8(1)); // parentBatchHash not zero hevm.expectRevert("nonzero parent batch hash"); rollup.importGenesisBatch(batchHeader, bytes32(uint256(1))); // import correctly - batchHeader = new bytes(89); + batchHeader = new bytes(129); batchHeader[25] = bytes1(uint8(1)); // dataHash not zero assertEq(rollup.finalizedStateRoots(0), bytes32(0)); assertEq(rollup.withdrawRoots(0), bytes32(0)); From 5459003a59e420a5d83c0313e98a2ac41af68a48 Mon Sep 17 00:00:00 2001 From: failfmi Date: Thu, 21 Dec 2023 18:18:56 +0200 Subject: [PATCH 42/59] fix(rollup): incorrect chunk and wrapped block lastAppliedL1BlockNum and blockRangeHash --- .../internal/controller/relayer/l2_relayer.go | 2 +- .../controller/relayer/l2_relayer_test.go | 4 +- .../controller/watcher/chunk_proposer.go | 56 +++++++++---------- .../internal/controller/watcher/l2_watcher.go | 39 +++++++++---- rollup/internal/orm/batch.go | 2 +- rollup/internal/orm/chunk.go | 19 ++++--- rollup/internal/orm/l2_block.go | 16 +++++- 7 files changed, 87 insertions(+), 51 deletions(-) diff --git a/rollup/internal/controller/relayer/l2_relayer.go 
b/rollup/internal/controller/relayer/l2_relayer.go index 4965a1c85b..32cfa4b416 100644 --- a/rollup/internal/controller/relayer/l2_relayer.go +++ b/rollup/internal/controller/relayer/l2_relayer.go @@ -177,7 +177,7 @@ func (r *Layer2Relayer) initializeGenesis() error { err = r.db.Transaction(func(dbTX *gorm.DB) error { var dbChunk *orm.Chunk - dbChunk, err = r.chunkOrm.InsertChunk(r.ctx, nil, chunk, dbTX) + dbChunk, err = r.chunkOrm.InsertChunk(r.ctx, chunk, dbTX) if err != nil { return fmt.Errorf("failed to insert chunk: %v", err) } diff --git a/rollup/internal/controller/relayer/l2_relayer_test.go b/rollup/internal/controller/relayer/l2_relayer_test.go index 030b495749..5b46be7946 100644 --- a/rollup/internal/controller/relayer/l2_relayer_test.go +++ b/rollup/internal/controller/relayer/l2_relayer_test.go @@ -55,9 +55,9 @@ func testL2RelayerProcessPendingBatches(t *testing.T) { err = l2BlockOrm.InsertL2Blocks(context.Background(), []*types.WrappedBlock{wrappedBlock1, wrappedBlock2}) assert.NoError(t, err) chunkOrm := orm.NewChunk(db) - dbChunk1, err := chunkOrm.InsertChunk(context.Background(), nil, chunk1) + dbChunk1, err := chunkOrm.InsertChunk(context.Background(), chunk1) assert.NoError(t, err) - dbChunk2, err := chunkOrm.InsertChunk(context.Background(), dbChunk1, chunk2) + dbChunk2, err := chunkOrm.InsertChunk(context.Background(), chunk2) assert.NoError(t, err) batchMeta := &types.BatchMeta{ StartChunkIndex: 0, diff --git a/rollup/internal/controller/watcher/chunk_proposer.go b/rollup/internal/controller/watcher/chunk_proposer.go index 25dad018b0..0d795771fd 100644 --- a/rollup/internal/controller/watcher/chunk_proposer.go +++ b/rollup/internal/controller/watcher/chunk_proposer.go @@ -167,34 +167,52 @@ func NewChunkProposer(ctx context.Context, client *ethclient.Client, cfg *config // TryProposeChunk tries to propose a new chunk. 
func (p *ChunkProposer) TryProposeChunk() { + p.chunkProposerCircleTotal.Inc() + proposedChunk, err := p.proposeChunk() + if err != nil { + p.proposeChunkFailureTotal.Inc() + log.Error("propose new chunk failed", "err", err) + return + } + if proposedChunk == nil { + return + } + + lastAppliedL1Block := proposedChunk.Blocks[len(proposedChunk.Blocks)-1].LastAppliedL1Block + parentChunk, err := p.chunkOrm.GetLatestChunk(p.ctx) if err != nil && !errors.Is(errors.Unwrap(err), gorm.ErrRecordNotFound) { log.Error("failed to get latest chunk", "err", err) return } - p.chunkProposerCircleTotal.Inc() - proposedChunk, err := p.proposeChunk(parentChunk) - if err != nil { - p.proposeChunkFailureTotal.Inc() - log.Error("propose new chunk failed", "err", err) - return + l1BlockRangeHash := common.HexToHash("0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470") // keccak256("") + if parentChunk.LastAppliedL1Block != lastAppliedL1Block { + hash, err := p.GetL1BlockRangeHash(p.ctx, parentChunk.LastAppliedL1Block+1, lastAppliedL1Block) + if err != nil { + log.Error("failed to get l1 block range hash", "from", parentChunk.LastAppliedL1Block+1, "to", lastAppliedL1Block, "err", err) + return + } + l1BlockRangeHash = *hash } - if err := p.updateChunkInfoInDB(parentChunk, proposedChunk); err != nil { + proposedChunk.LastAppliedL1Block = lastAppliedL1Block + proposedChunk.L1BlockRangeHash = l1BlockRangeHash + + if err := p.updateChunkInfoInDB(proposedChunk); err != nil { p.proposeChunkUpdateInfoFailureTotal.Inc() log.Error("update chunk info in orm failed", "err", err) } } -func (p *ChunkProposer) updateChunkInfoInDB(parentChunk *orm.Chunk, chunk *types.Chunk) error { +func (p *ChunkProposer) updateChunkInfoInDB(chunk *types.Chunk) error { if chunk == nil { return nil } p.proposeChunkUpdateInfoTotal.Inc() err := p.db.Transaction(func(dbTX *gorm.DB) error { - dbChunk, err := p.chunkOrm.InsertChunk(p.ctx, parentChunk, chunk, dbTX) + dbChunk, err := p.chunkOrm.InsertChunk(p.ctx, chunk, dbTX) if err != nil { log.Warn("ChunkProposer.InsertChunk failed", "chunk hash", chunk.Hash) return err @@ -208,7 +226,7 @@ func (p *ChunkProposer) updateChunkInfoInDB(parentChunk *orm.Chunk, chunk *types return err } -func (p *ChunkProposer) proposeChunk(parentChunk *orm.Chunk) (*types.Chunk, error) { +func (p *ChunkProposer) proposeChunk() (*types.Chunk, error) { unchunkedBlockHeight, err := p.chunkOrm.GetUnchunkedBlockHeight(p.ctx) if err != nil { return nil, err @@ -230,24 +248,6 @@ func (p *ChunkProposer) proposeChunk(parentChunk *orm.Chunk) (*types.Chunk, erro var totalL1CommitCalldataSize uint64 var totalL1CommitGas uint64 crc := chunkRowConsumption{} - lastAppliedL1Block := blocks[len(blocks)-1].LastAppliedL1Block - var l1BlockRangeHashFrom uint64 - - if parentChunk != nil { - l1BlockRangeHashFrom = parentChunk.LastAppliedL1Block - if l1BlockRangeHashFrom != 0 { - l1BlockRangeHashFrom++ - } - } - - l1BlockRangeHash, err := p.GetL1BlockRangeHash(p.ctx, l1BlockRangeHashFrom, lastAppliedL1Block) - if err != nil { - log.Error("failed to get l1 block range hash", "from", l1BlockRangeHashFrom, "to", lastAppliedL1Block, "err", err) - return nil, fmt.Errorf("chunk-proposer failed to get l1 block range hash error: %w", err) - } - - chunk.LastAppliedL1Block = lastAppliedL1Block - chunk.L1BlockRangeHash = *l1BlockRangeHash for i, block := range blocks { // metric values diff --git a/rollup/internal/controller/watcher/l2_watcher.go b/rollup/internal/controller/watcher/l2_watcher.go index 347633e3dc..240a0a01ee 100644 --- 
a/rollup/internal/controller/watcher/l2_watcher.go +++ b/rollup/internal/controller/watcher/l2_watcher.go @@ -103,6 +103,12 @@ func (w *L2WatcherClient) TryFetchRunningMissingBlocks(blockHeight uint64) { return } + lastAppliedL1BlockNumber, err := w.l2BlockOrm.GetL2BlocksLastAppliedL1BlockNumber(w.ctx) + if err != nil { + log.Error("failed to GetL2BlocksLastAppliedL1BlockNumber", "err", err) + return + } + // Fetch and store block traces for missing blocks for from := heightInDB + 1; from <= blockHeight; from += blockTracesFetchLimit { to := from + blockTracesFetchLimit - 1 @@ -111,7 +117,8 @@ func (w *L2WatcherClient) TryFetchRunningMissingBlocks(blockHeight uint64) { to = blockHeight } - if err = w.getAndStoreBlockTraces(w.ctx, from, to); err != nil { + lastAppliedL1BlockNumber, err = w.getAndStoreBlockTraces(w.ctx, lastAppliedL1BlockNumber, from, to) + if err != nil { log.Error("fail to getAndStoreBlockTraces", "from", from, "to", to, "err", err) return } @@ -153,30 +160,40 @@ func txsToTxsData(txs gethTypes.Transactions) []*gethTypes.TransactionData { return txsData } -func (w *L2WatcherClient) getAndStoreBlockTraces(ctx context.Context, from, to uint64) error { +func (w *L2WatcherClient) getAndStoreBlockTraces(ctx context.Context, previousLastAppliedL1BlockNumber, from, to uint64) (uint64, error) { var blocks []*types.WrappedBlock for number := from; number <= to; number++ { log.Debug("retrieving block", "height", number) block, err := w.GetBlockByNumberOrHash(ctx, rpc.BlockNumberOrHashWithNumber(rpc.BlockNumber(number))) if err != nil { - return fmt.Errorf("failed to GetBlockByNumberOrHash: %v. number: %v", err, number) + return 0, fmt.Errorf("failed to GetBlockByNumberOrHash: %v. number: %v", err, number) } if block.RowConsumption == nil { - return fmt.Errorf("fetched block does not contain RowConsumption. number: %v", number) + return 0, fmt.Errorf("fetched block does not contain RowConsumption. number: %v", number) + } + + lastAppliedL1BlockNumber, _ := block.L1BlockHashesInfo() + // when no new l1 block hashes tx is found, set the lastAppliedL1BlockNumber to be previous + if lastAppliedL1BlockNumber == 0 { + lastAppliedL1BlockNumber = previousLastAppliedL1BlockNumber } log.Info("retrieved block", "height", block.Header().Number, "hash", block.Header().Hash().String()) withdrawRoot, err3 := w.StorageAt(ctx, w.messageQueueAddress, w.withdrawTrieRootSlot, big.NewInt(int64(number))) if err3 != nil { - return fmt.Errorf("failed to get withdrawRoot: %v. number: %v", err3, number) + return 0, fmt.Errorf("failed to get withdrawRoot: %v. 
number: %v", err3, number) } + blocks = append(blocks, &types.WrappedBlock{ - Header: block.Header(), - Transactions: txsToTxsData(block.Transactions()), - WithdrawRoot: common.BytesToHash(withdrawRoot), - RowConsumption: block.RowConsumption, + Header: block.Header(), + Transactions: txsToTxsData(block.Transactions()), + WithdrawRoot: common.BytesToHash(withdrawRoot), + RowConsumption: block.RowConsumption, + LastAppliedL1Block: lastAppliedL1BlockNumber, }) + + previousLastAppliedL1BlockNumber = lastAppliedL1BlockNumber } if len(blocks) > 0 { @@ -184,11 +201,11 @@ func (w *L2WatcherClient) getAndStoreBlockTraces(ctx context.Context, from, to u w.metrics.rollupL2BlockL1CommitCalldataSize.Set(float64(block.EstimateL1CommitCalldataSize())) } if err := w.l2BlockOrm.InsertL2Blocks(w.ctx, blocks); err != nil { - return fmt.Errorf("failed to batch insert BlockTraces: %v", err) + return 0, fmt.Errorf("failed to batch insert BlockTraces: %v", err) } } - return nil + return previousLastAppliedL1BlockNumber, nil } // FetchContractEvent pull latest event logs from given contract address and save in DB diff --git a/rollup/internal/orm/batch.go b/rollup/internal/orm/batch.go index 20057f1da6..1b9d2516cd 100644 --- a/rollup/internal/orm/batch.go +++ b/rollup/internal/orm/batch.go @@ -34,7 +34,7 @@ type Batch struct { ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"` BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"` - LastAppliedL1Block uint64 `json:"last_applied_l1_block"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block"` L1BlockRangeHash string `json:"l1_block_range_hash"` // proof diff --git a/rollup/internal/orm/chunk.go b/rollup/internal/orm/chunk.go index ac6196791f..e66eec6583 100644 --- a/rollup/internal/orm/chunk.go +++ b/rollup/internal/orm/chunk.go @@ -155,7 +155,7 @@ func (o *Chunk) GetChunksGEIndex(ctx context.Context, index uint64, limit int) ( } // InsertChunk inserts a new chunk into the database. 
-func (o *Chunk) InsertChunk(ctx context.Context, parentDbChunk *Chunk, chunk *types.Chunk, dbTX ...*gorm.DB) (*Chunk, error) { +func (o *Chunk) InsertChunk(ctx context.Context, chunk *types.Chunk, dbTX ...*gorm.DB) (*Chunk, error) { if chunk == nil || len(chunk.Blocks) == 0 { return nil, errors.New("invalid args") } @@ -164,15 +164,20 @@ func (o *Chunk) InsertChunk(ctx context.Context, parentDbChunk *Chunk, chunk *ty var totalL1MessagePoppedBefore uint64 var parentChunkHash string var parentChunkStateRoot string + parentChunk, err := o.GetLatestChunk(ctx) + if err != nil && !errors.Is(errors.Unwrap(err), gorm.ErrRecordNotFound) { + log.Error("failed to get latest chunk", "err", err) + return nil, fmt.Errorf("Chunk.InsertChunk error: %w", err) + } - // if parentDbChunk==nil then err==gorm.ErrRecordNotFound, which means there's + // if parentChunk==nil then err==gorm.ErrRecordNotFound, which means there's // not chunk record in the db, we then use default empty values for the creating chunk; // if parentDbChunk!=nil then err=nil, then we fill the parentChunk-related data into the creating chunk - if parentDbChunk != nil { - chunkIndex = parentDbChunk.Index + 1 - totalL1MessagePoppedBefore = parentDbChunk.TotalL1MessagesPoppedBefore + uint64(parentDbChunk.TotalL1MessagesPoppedInChunk) - parentChunkHash = parentDbChunk.Hash - parentChunkStateRoot = parentDbChunk.StateRoot + if parentChunk != nil { + chunkIndex = parentChunk.Index + 1 + totalL1MessagePoppedBefore = parentChunk.TotalL1MessagesPoppedBefore + uint64(parentChunk.TotalL1MessagesPoppedInChunk) + parentChunkHash = parentChunk.Hash + parentChunkStateRoot = parentChunk.StateRoot } hash, err := chunk.Hash(totalL1MessagePoppedBefore) diff --git a/rollup/internal/orm/l2_block.go b/rollup/internal/orm/l2_block.go index ae34625455..22a186fd45 100644 --- a/rollup/internal/orm/l2_block.go +++ b/rollup/internal/orm/l2_block.go @@ -65,13 +65,25 @@ func (o *L2Block) GetL2BlocksLatestHeight(ctx context.Context) (uint64, error) { return maxNumber, nil } +func (o *L2Block) GetL2BlocksLastAppliedL1BlockNumber(ctx context.Context) (uint64, error) { + db := o.db.WithContext(ctx) + db = db.Model(&L2Block{}) + db = db.Select("COALESCE(MAX(last_applied_l1_block), 0)") + + var lastAppliedL1Number uint64 + if err := db.Row().Scan(&lastAppliedL1Number); err != nil { + return 0, fmt.Errorf("L2Block.GetL2BlocksLastAppliedL1BlockNumber error: %w", err) + } + return lastAppliedL1Number, nil +} + // GetL2WrappedBlocksGEHeight retrieves L2 blocks that have a block number greater than or equal to the given height. // The blocks are converted into WrappedBlock format for output. // The returned blocks are sorted in ascending order by their block number. 
func (o *L2Block) GetL2WrappedBlocksGEHeight(ctx context.Context, height uint64, limit int) ([]*types.WrappedBlock, error) { db := o.db.WithContext(ctx) db = db.Model(&L2Block{}) - db = db.Select("header, transactions, withdraw_root, row_consumption") + db = db.Select("header, transactions, withdraw_root, row_consumption, last_applied_l1_block") db = db.Where("number >= ?", height) db = db.Order("number ASC") @@ -103,6 +115,8 @@ func (o *L2Block) GetL2WrappedBlocksGEHeight(ctx context.Context, height uint64, return nil, fmt.Errorf("L2Block.GetL2WrappedBlocksGEHeight error: %w", err) } + wrappedBlock.LastAppliedL1Block = v.LastAppliedL1Block + wrappedBlocks = append(wrappedBlocks, &wrappedBlock) } From aeccdb0da0f1227552a85a5b26b7f70a5104f65d Mon Sep 17 00:00:00 2001 From: failfmi Date: Sat, 23 Dec 2023 11:07:06 +0200 Subject: [PATCH 43/59] fix(common/types): chunk encoding to match; block txs to include l1 block hashes specific fields add missing chunk setters --- common/types/batch_header_test.go | 10 ++-- common/types/block.go | 53 +++++++++++++------ common/types/chunk.go | 13 ++--- common/types/chunk_test.go | 4 +- common/types/transaction_data.go | 16 ++++++ contracts/src/libraries/codec/ChunkCodec.sol | 1 + go.work.sum | 2 + rollup/go.mod | 2 +- .../internal/controller/relayer/l2_relayer.go | 10 +--- .../controller/watcher/batch_proposer.go | 5 +- .../internal/controller/watcher/l2_watcher.go | 42 +++++++++------ rollup/internal/orm/batch.go | 2 +- rollup/internal/orm/l2_block.go | 4 +- 13 files changed, 105 insertions(+), 59 deletions(-) create mode 100644 common/types/transaction_data.go diff --git a/common/types/batch_header_test.go b/common/types/batch_header_test.go index da3b0d5a92..d9f4241553 100644 --- a/common/types/batch_header_test.go +++ b/common/types/batch_header_test.go @@ -151,7 +151,7 @@ func TestBatchHeaderEncode(t *testing.T) { assert.NotNil(t, batchHeader) bytes := batchHeader.Encode() assert.Equal(t, 129, len(bytes)) - assert.Equal(t, "0100000000000000010000000000000000000000000000000010a64c9bd905f8caf5d668fbda622d6558c5a42cdb4b3895709743d159c22e537afdc2ea6f8daaa4b430ce1424f59bcec401d00e34a99b1da457babc405a86070000000000000000290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563", common.Bytes2Hex(bytes)) + assert.Equal(t, "0100000000000000010000000000000000000000000000000079841093f56d4e454a27371c924b604f9f1831bcecf26ef5549a4b86b5f7cc1b7afdc2ea6f8daaa4b430ce1424f59bcec401d00e34a99b1da457babc405a86070000000000000000290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563", common.Bytes2Hex(bytes)) // With L1 Msg templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json") @@ -169,7 +169,7 @@ func TestBatchHeaderEncode(t *testing.T) { assert.NotNil(t, batchHeader) bytes = batchHeader.Encode() assert.Equal(t, 161, len(bytes)) - assert.Equal(t, "010000000000000001000000000000000b000000000000000b34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1ca7afdc2ea6f8daaa4b430ce1424f59bcec401d00e34a99b1da457babc405a860700000000000000000000000000000000000000000000000000000000000003ff0000000000000000290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563", common.Bytes2Hex(bytes)) + assert.Equal(t, "010000000000000001000000000000000b000000000000000bd66e72c479686e1f25b496c0fa38f8722b3fdd381ce3bf56e78129b510adbbd77afdc2ea6f8daaa4b430ce1424f59bcec401d00e34a99b1da457babc405a860700000000000000000000000000000000000000000000000000000000000003ff0000000000000000290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563", 
common.Bytes2Hex(bytes)) } func TestBatchHeaderHash(t *testing.T) { @@ -197,7 +197,7 @@ func TestBatchHeaderHash(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, batchHeader) hash := batchHeader.Hash() - assert.Equal(t, "e5131040ff2c5c0dafc629651452e3c6d84e2a5512e883cc94a3ca1677fb5d5e", common.Bytes2Hex(hash.Bytes())) + assert.Equal(t, "c1c81ddb1216d8bcb26d8fb0b60d3c10a3f37c15cdd53893ea31e76b20de51f4", common.Bytes2Hex(hash.Bytes())) templateBlockTrace, err = os.ReadFile("../testdata/blockTrace_03.json") assert.NoError(t, err) @@ -213,7 +213,7 @@ func TestBatchHeaderHash(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, batchHeader2) hash2 := batchHeader2.Hash() - assert.Equal(t, "922db89ce8a8e3e202d43ca70e59b9277c1f0d90c72daed7270896f410abb3ac", common.Bytes2Hex(hash2.Bytes())) + assert.Equal(t, "c2ce574d3331ea9f7a352a0b1fb7e90db246590938c8e7a9b39ff53a23a1a568", common.Bytes2Hex(hash2.Bytes())) // With L1 Msg templateBlockTrace3, err := os.ReadFile("../testdata/blockTrace_04.json") @@ -230,7 +230,7 @@ func TestBatchHeaderHash(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, batchHeader) hash = batchHeader.Hash() - assert.Equal(t, "438ed7f9d8d5a312b5eab7527789c7c1fbb26c9b2700e5f4ce0facd7824bd5ba", common.Bytes2Hex(hash.Bytes())) + assert.Equal(t, "a66ce437f630868893f2f2fc6894a50363a128a6db1bf25ba0b19a16f1bf5361", common.Bytes2Hex(hash.Bytes())) } func TestBatchHeaderDecode(t *testing.T) { diff --git a/common/types/block.go b/common/types/block.go index 3936fd33d2..2d0a3a7e18 100644 --- a/common/types/block.go +++ b/common/types/block.go @@ -31,10 +31,10 @@ func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { type WrappedBlock struct { Header *types.Header `json:"header"` // Transactions is only used for recover types.Transactions, the from of types.TransactionData field is missing. 
- Transactions []*types.TransactionData `json:"transactions"` - WithdrawRoot common.Hash `json:"withdraw_trie_root,omitempty"` - RowConsumption *types.RowConsumption `json:"row_consumption"` - LastAppliedL1Block uint64 `json:"last_applied_l1_block"` + Transactions []*TransactionData `json:"transactions"` + WithdrawRoot common.Hash `json:"withdraw_trie_root,omitempty"` + RowConsumption *types.RowConsumption `json:"row_consumption"` + LastAppliedL1Block uint64 `json:"last_applied_l1_block"` txPayloadLengthCache map[string]uint64 } @@ -149,7 +149,7 @@ func (w *WrappedBlock) EstimateL1CommitGas() uint64 { return total } -func (w *WrappedBlock) getTxPayloadLength(txData *types.TransactionData) uint64 { +func (w *WrappedBlock) getTxPayloadLength(txData *TransactionData) uint64 { if w.txPayloadLengthCache == nil { w.txPayloadLengthCache = make(map[string]uint64) } @@ -168,23 +168,42 @@ func (w *WrappedBlock) getTxPayloadLength(txData *types.TransactionData) uint64 return txPayloadLength } -func convertTxDataToRLPEncoding(txData *types.TransactionData) ([]byte, error) { +func convertTxDataToRLPEncoding(txData *TransactionData) ([]byte, error) { data, err := hexutil.Decode(txData.Data) if err != nil { return nil, fmt.Errorf("failed to decode txData.Data: %s, err: %w", txData.Data, err) } - tx := types.NewTx(&types.LegacyTx{ - Nonce: txData.Nonce, - To: txData.To, - Value: txData.Value.ToInt(), - Gas: txData.Gas, - GasPrice: txData.GasPrice.ToInt(), - Data: data, - V: txData.V.ToInt(), - R: txData.R.ToInt(), - S: txData.S.ToInt(), - }) + var tx *types.Transaction + if txData.Type == types.L1BlockHashesTxType { + if txData.FirstAppliedL1Block == nil { + return nil, errors.New("missing required field 'firstAppliedL1Block' in transaction") + } + if txData.LastAppliedL1Block == nil { + return nil, errors.New("missing required field 'lastAppliedL1Block' in transaction") + } + + tx = types.NewTx(&types.L1BlockHashesTx{ + FirstAppliedL1Block: uint64(*txData.FirstAppliedL1Block), + LastAppliedL1Block: uint64(*txData.LastAppliedL1Block), + BlockHashesRange: txData.BlockRangeHash, + To: txData.To, + Data: data, + Sender: txData.From, + }) + } else { + tx = types.NewTx(&types.LegacyTx{ + Nonce: txData.Nonce, + To: txData.To, + Value: txData.Value.ToInt(), + Gas: txData.Gas, + GasPrice: txData.GasPrice.ToInt(), + Data: data, + V: txData.V.ToInt(), + R: txData.R.ToInt(), + S: txData.S.ToInt(), + }) + } rlpTxData, err := tx.MarshalBinary() if err != nil { diff --git a/common/types/chunk.go b/common/types/chunk.go index e5b31035f9..1381f5c5b0 100644 --- a/common/types/chunk.go +++ b/common/types/chunk.go @@ -64,7 +64,7 @@ func (c *Chunk) Encode(totalL1MessagePoppedBefore uint64) ([]byte, error) { // Append rlp-encoded l2Txs for _, txData := range block.Transactions { // TODO(l1blockhashes): Check if necessary - if txData.Type == types.L1MessageTxType || txData.Type == types.L1BlockHashesTxType { + if txData.Type == types.L1MessageTxType { continue } rlpTxData, err := convertTxDataToRLPEncoding(txData) @@ -98,9 +98,11 @@ func (c *Chunk) Hash(totalL1MessagePoppedBefore uint64) (common.Hash, error) { // concatenate block contexts var dataBytes []byte + + chunkBytes = chunkBytes[1:] // remove num blocks for i := 0; i < int(numBlocks); i++ { - // only the first 58 bytes of each BlockContext are needed for the hashing process - dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...) + block := chunkBytes[68*i : 68*i+68] + dataBytes = append(dataBytes, block[:58]...) 
// TODO(l1blockhashes): skips lastAppliedL1Block } // concatenate l1 and l2 tx hashes @@ -113,8 +115,7 @@ func (c *Chunk) Hash(totalL1MessagePoppedBefore uint64) (common.Hash, error) { if err != nil { return common.Hash{}, err } - // TODO(l1blockhashes): Check if necessary - if txData.Type == types.L1MessageTxType || txData.Type == types.L1BlockHashesTxType { + if txData.Type == types.L1MessageTxType { l1TxHashes = append(l1TxHashes, hashBytes...) } else { l2TxHashes = append(l2TxHashes, hashBytes...) @@ -128,7 +129,7 @@ func (c *Chunk) Hash(totalL1MessagePoppedBefore uint64) (common.Hash, error) { binary.BigEndian.PutUint64(lastAppliedL1BlockBytes[:], c.LastAppliedL1Block) dataBytes = append(dataBytes, lastAppliedL1BlockBytes[:]...) dataBytes = append(dataBytes, c.L1BlockRangeHash.Bytes()...) - + hash := crypto.Keccak256Hash(dataBytes) return hash, nil } diff --git a/common/types/chunk_test.go b/common/types/chunk_test.go index 7361830422..2a4e4908dd 100644 --- a/common/types/chunk_test.go +++ b/common/types/chunk_test.go @@ -129,7 +129,7 @@ func TestChunkHash(t *testing.T) { } hash, err = chunk.Hash(0) assert.NoError(t, err) - assert.Equal(t, "0xaca17d4cae322f15537a4b770332171478991fd8b6a158327205ae4596cc838c", hash.Hex()) + assert.Equal(t, "0x58eedb43e56fa9a7f460a72d966c304df3edab4f20506a6580ebd7b94b7a946e", hash.Hex()) // Test case 4: successfully hashing a chunk on two blocks each with L1 and L2 txs templateBlockTrace2, err := os.ReadFile("../testdata/blockTrace_04.json") @@ -144,7 +144,7 @@ func TestChunkHash(t *testing.T) { } hash, err = chunk.Hash(0) assert.NoError(t, err) - assert.Equal(t, "0x811b8fd798aacaaeb9857257d5053aabdc65293f301e58916c24b85b11b9db95", hash.Hex()) + assert.Equal(t, "0xb69e8db42cbc11065ffd691947b1271e3554d0fd60bd9bd343e74221343974bc", hash.Hex()) } func TestErrorPaths(t *testing.T) { diff --git a/common/types/transaction_data.go b/common/types/transaction_data.go new file mode 100644 index 0000000000..8fd6ee5f96 --- /dev/null +++ b/common/types/transaction_data.go @@ -0,0 +1,16 @@ +package types + +import ( + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" + gethTypes "github.com/scroll-tech/go-ethereum/core/types" +) + +// This is needed as we include the L1 Block Hashes Tx into the chunk blocks. +// The transaction hash is generated with the inclusion of the fields below. +type TransactionData struct { + gethTypes.TransactionData + FirstAppliedL1Block *hexutil.Uint64 `json:"firstAppliedL1Block,omitempty"` + LastAppliedL1Block *hexutil.Uint64 `json:"lastAppliedL1Block,omitempty"` + BlockRangeHash []common.Hash `json:"blockRangeHash,omitempty"` +} diff --git a/contracts/src/libraries/codec/ChunkCodec.sol b/contracts/src/libraries/codec/ChunkCodec.sol index f190c6fc46..e8f1ea831b 100644 --- a/contracts/src/libraries/codec/ChunkCodec.sol +++ b/contracts/src/libraries/codec/ChunkCodec.sol @@ -93,6 +93,7 @@ library ChunkCodec { uint256 index ) internal pure returns (uint256) { // only first 58 bytes is needed. + // TODO(l1blockhashes): skips lastAppliedL1Block. 
assembly { chunkPtr := add(chunkPtr, add(1, mul(BLOCK_CONTEXT_LENGTH, index))) mstore(dstPtr, mload(chunkPtr)) // first 32 bytes diff --git a/go.work.sum b/go.work.sum index 766f509ff1..ed13bf7b42 100644 --- a/go.work.sum +++ b/go.work.sum @@ -528,6 +528,8 @@ github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgx github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= github.com/limechain/scroll-go-ethereum v0.0.0-20231128150717-8a4a05ac3f85 h1:kjmwYeYgZuw2P/wLFlYJJnTLO4H4a4kRsrj7I4Fwjag= github.com/limechain/scroll-go-ethereum v0.0.0-20231128150717-8a4a05ac3f85/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= +github.com/limechain/scroll-go-ethereum v0.0.0-20231223062238-eaa38f09ce93 h1:sjxxhJX/ZA3vIgwxScgDyAEEoLv+Bh1NrrsJ0cGmaZw= +github.com/limechain/scroll-go-ethereum v0.0.0-20231223062238-eaa38f09ce93/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= diff --git a/rollup/go.mod b/rollup/go.mod index 0669218fe1..905e2b8ffa 100644 --- a/rollup/go.mod +++ b/rollup/go.mod @@ -87,4 +87,4 @@ require ( gopkg.in/yaml.v3 v3.0.1 // indirect ) -replace github.com/scroll-tech/go-ethereum => github.com/limechain/scroll-go-ethereum 8a4a05ac3f85dd674f96df83181105d47c0a76d0 +replace github.com/scroll-tech/go-ethereum => github.com/limechain/scroll-go-ethereum eaa38f09ce932a52bd29a665e81526d447af0b95 diff --git a/rollup/internal/controller/relayer/l2_relayer.go b/rollup/internal/controller/relayer/l2_relayer.go index 32cfa4b416..68b4967370 100644 --- a/rollup/internal/controller/relayer/l2_relayer.go +++ b/rollup/internal/controller/relayer/l2_relayer.go @@ -355,14 +355,6 @@ func (r *Layer2Relayer) ProcessPendingBatches() { return } - parentBatchLatestChunk, err := r.chunkOrm.GetChunkByHash(r.ctx, dbChunks[0].ParentChunkHash) - if err != nil { - log.Error("Failed to fetch parent chunk", - "chunk index", dbChunks[0].Index, - "error", err) - return - } - encodedChunks := make([][]byte, len(dbChunks)) for i, c := range dbChunks { var wrappedBlocks []*types.WrappedBlock @@ -387,7 +379,7 @@ func (r *Layer2Relayer) ProcessPendingBatches() { encodedChunks[i] = chunkBytes } - calldata, err := r.l1RollupABI.Pack("commitBatch", currentBatchHeader.Version(), parentBatch.BatchHeader, encodedChunks, currentBatchHeader.SkippedL1MessageBitmap(), parentBatchLatestChunk.LastAppliedL1Block) + calldata, err := r.l1RollupABI.Pack("commitBatch", currentBatchHeader.Version(), parentBatch.BatchHeader, encodedChunks, currentBatchHeader.SkippedL1MessageBitmap(), parentBatch.LastAppliedL1Block) if err != nil { log.Error("Failed to pack commitBatch", "index", batch.Index, "error", err) return diff --git a/rollup/internal/controller/watcher/batch_proposer.go b/rollup/internal/controller/watcher/batch_proposer.go index 0c3b0ba1d6..36820d52e5 100644 --- a/rollup/internal/controller/watcher/batch_proposer.go +++ b/rollup/internal/controller/watcher/batch_proposer.go @@ -7,6 +7,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/log" "gorm.io/gorm" @@ -290,7 +291,9 @@ func (p *BatchProposer) 
dbChunksToRollupChunks(dbChunks []*orm.Chunk) ([]*types. return nil, err } chunks[i] = &types.Chunk{ - Blocks: wrappedBlocks, + Blocks: wrappedBlocks, + LastAppliedL1Block: c.LastAppliedL1Block, + L1BlockRangeHash: common.HexToHash(c.L1BlockRangeHash), } } return chunks, nil diff --git a/rollup/internal/controller/watcher/l2_watcher.go b/rollup/internal/controller/watcher/l2_watcher.go index 240a0a01ee..19f068172b 100644 --- a/rollup/internal/controller/watcher/l2_watcher.go +++ b/rollup/internal/controller/watcher/l2_watcher.go @@ -127,8 +127,8 @@ func (w *L2WatcherClient) TryFetchRunningMissingBlocks(blockHeight uint64) { } } -func txsToTxsData(txs gethTypes.Transactions) []*gethTypes.TransactionData { - txsData := make([]*gethTypes.TransactionData, len(txs)) +func txsToTxsData(txs gethTypes.Transactions) []*types.TransactionData { + txsData := make([]*types.TransactionData, len(txs)) for i, tx := range txs { v, r, s := tx.RawSignatureValues() @@ -141,21 +141,31 @@ func txsToTxsData(txs gethTypes.Transactions) []*gethTypes.TransactionData { nonce = msg.QueueIndex } - txsData[i] = &gethTypes.TransactionData{ - Type: tx.Type(), - TxHash: tx.Hash().String(), - Nonce: nonce, - ChainId: (*hexutil.Big)(tx.ChainId()), - Gas: tx.Gas(), - GasPrice: (*hexutil.Big)(tx.GasPrice()), - To: tx.To(), - Value: (*hexutil.Big)(tx.Value()), - Data: hexutil.Encode(tx.Data()), - IsCreate: tx.To() == nil, - V: (*hexutil.Big)(v), - R: (*hexutil.Big)(r), - S: (*hexutil.Big)(s), + txData := &types.TransactionData{ + TransactionData: gethTypes.TransactionData{ + Type: tx.Type(), + TxHash: tx.Hash().String(), + Nonce: nonce, + ChainId: (*hexutil.Big)(tx.ChainId()), + Gas: tx.Gas(), + GasPrice: (*hexutil.Big)(tx.GasPrice()), + To: tx.To(), + Value: (*hexutil.Big)(tx.Value()), + Data: hexutil.Encode(tx.Data()), + IsCreate: tx.To() == nil, + V: (*hexutil.Big)(v), + R: (*hexutil.Big)(r), + S: (*hexutil.Big)(s), + }, + } + + if l1blockHashesTx := tx.AsL1BlockHashesTx(); l1blockHashesTx != nil { + txData.FirstAppliedL1Block = (*hexutil.Uint64)(&l1blockHashesTx.FirstAppliedL1Block) + txData.LastAppliedL1Block = (*hexutil.Uint64)(&l1blockHashesTx.LastAppliedL1Block) + txData.BlockRangeHash = l1blockHashesTx.BlockHashesRange } + + txsData[i] = txData } return txsData } diff --git a/rollup/internal/orm/batch.go b/rollup/internal/orm/batch.go index 1b9d2516cd..28e0b65857 100644 --- a/rollup/internal/orm/batch.go +++ b/rollup/internal/orm/batch.go @@ -292,7 +292,7 @@ func (o *Batch) InsertBatch(ctx context.Context, chunks []*types.Chunk, batchMet OracleStatus: int16(types.GasOraclePending), TotalL1CommitGas: batchMeta.TotalL1CommitGas, TotalL1CommitCalldataSize: batchMeta.TotalL1CommitCalldataSize, - LastAppliedL1Block: chunks[numChunks-1].LastAppliedL1Block, + LastAppliedL1Block: batchHeader.LastAppliedL1Block(), L1BlockRangeHash: batchHeader.L1BlockRangeHash().Hex(), } diff --git a/rollup/internal/orm/l2_block.go b/rollup/internal/orm/l2_block.go index 22a186fd45..1a3c010016 100644 --- a/rollup/internal/orm/l2_block.go +++ b/rollup/internal/orm/l2_block.go @@ -160,7 +160,7 @@ func (o *L2Block) GetL2BlocksInRange(ctx context.Context, startBlockNumber uint6 db := o.db.WithContext(ctx) db = db.Model(&L2Block{}) - db = db.Select("header, transactions, withdraw_root, row_consumption") + db = db.Select("header, transactions, withdraw_root, row_consumption, last_applied_l1_block") db = db.Where("number >= ? 
AND number <= ?", startBlockNumber, endBlockNumber) db = db.Order("number ASC") @@ -193,6 +193,8 @@ func (o *L2Block) GetL2BlocksInRange(ctx context.Context, startBlockNumber uint6 return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber) } + wrappedBlock.LastAppliedL1Block = v.LastAppliedL1Block + wrappedBlocks = append(wrappedBlocks, &wrappedBlock) } From 83f5c840b60a732911751ce79ccf39a3d172de69 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Wed, 10 Jan 2024 14:02:20 +0200 Subject: [PATCH 44/59] fix(prover): add gen chunk fix --- prover/core/prover.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prover/core/prover.go b/prover/core/prover.go index 0988ccfd87..924e68bbe4 100644 --- a/prover/core/prover.go +++ b/prover/core/prover.go @@ -113,11 +113,11 @@ func (p *ProverCore) ProveChunk( return nil, fmt.Errorf("prover is not a chunk-prover (type: %v), but is trying to prove a chunk", p.cfg.ProofType) } - chunkTraceByt, err := json.Marshal(chunkTrace) + chunkTraceByt, err := json.Marshal(chunkTrace.BlockTraces) if err != nil { return nil, err } - proofByt, err := p.proveChunk(chunkTraceByt) + proofByt, err := p.proveChunk(chunkTraceByt, chunkTrace.PrevLastAppliedL1Block, chunkTrace.LastAppliedL1Block, chunkTrace.L1BlockRangeHash) if err != nil { return nil, err } From 6d51b40b4c73fe34ebebaf4c32a0e5d46070313b Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Wed, 10 Jan 2024 14:24:43 +0200 Subject: [PATCH 45/59] fix(prover): add fix prover gen chunk trace --- common/libzkp/impl/Cargo.toml | 4 ++-- common/libzkp/impl/src/chunk.rs | 8 ++++---- common/libzkp/interface/libzkp.h | 2 +- prover/core/prover.go | 20 ++++++-------------- 4 files changed, 13 insertions(+), 21 deletions(-) diff --git a/common/libzkp/impl/Cargo.toml b/common/libzkp/impl/Cargo.toml index 227c001568..5402eb85f6 100644 --- a/common/libzkp/impl/Cargo.toml +++ b/common/libzkp/impl/Cargo.toml @@ -8,7 +8,7 @@ edition = "2021" crate-type = ["cdylib"] [patch.crates-io] -ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" } +ethers-core = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc", features = ["scroll"] } [patch."https://github.com/privacy-scaling-explorations/halo2.git"] halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" } [patch."https://github.com/privacy-scaling-explorations/poseidon.git"] @@ -21,7 +21,7 @@ halo2curves = { git = "https://github.com/scroll-tech/halo2curves.git", branch = [dependencies] halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" } -prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.9.7", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] } +prover = { git = "https://github.com/LimeChain/scroll-zkevm-circuits", branch = "LimeChain/wip-block-hashes-poc", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] } base64 = "0.13.0" env_logger = "0.9.0" diff --git a/common/libzkp/impl/src/chunk.rs b/common/libzkp/impl/src/chunk.rs index c55f98bb7e..b5a2957e82 100644 --- a/common/libzkp/impl/src/chunk.rs +++ b/common/libzkp/impl/src/chunk.rs @@ -66,16 +66,16 @@ pub unsafe extern "C" fn get_chunk_vk() -> *const c_char { /// # Safety #[no_mangle] -pub unsafe extern "C" fn gen_chunk_proof(block_traces: *const c_char) -> *const c_char { +pub unsafe extern "C" fn 
gen_chunk_proof(chunk_trace: *const c_char) -> *const c_char { let proof_result: Result, String> = panic_catch(|| { - let block_traces = c_char_to_vec(block_traces); - let block_traces = serde_json::from_slice::>(&block_traces) + let chunk_trace = c_char_to_vec(chunk_trace); + let chunk_trace = serde_json::from_slice::>(&chunk_trace) .map_err(|e| format!("failed to deserialize block traces: {e:?}"))?; let proof = PROVER .get_mut() .expect("failed to get mutable reference to PROVER.") - .gen_chunk_proof(block_traces, None, None, OUTPUT_DIR.as_deref()) + .gen_chunk_proof(chunk_trace, None, None, OUTPUT_DIR.as_deref()) .map_err(|e| format!("failed to generate proof: {e:?}"))?; serde_json::to_vec(&proof).map_err(|e| format!("failed to serialize the proof: {e:?}")) diff --git a/common/libzkp/interface/libzkp.h b/common/libzkp/interface/libzkp.h index 1c13960879..7fe37a1c58 100644 --- a/common/libzkp/interface/libzkp.h +++ b/common/libzkp/interface/libzkp.h @@ -8,7 +8,7 @@ char verify_batch_proof(char* proof); void init_chunk_prover(char* params_dir, char* assets_dir); void init_chunk_verifier(char* params_dir, char* assets_dir); char* get_chunk_vk(); -char* gen_chunk_proof(char* block_traces); +char* gen_chunk_proof(char* chunk_trace); char verify_chunk_proof(char* proof); char* block_traces_to_chunk_info(char* block_traces); diff --git a/prover/core/prover.go b/prover/core/prover.go index 924e68bbe4..f2aa14e1db 100644 --- a/prover/core/prover.go +++ b/prover/core/prover.go @@ -113,11 +113,11 @@ func (p *ProverCore) ProveChunk( return nil, fmt.Errorf("prover is not a chunk-prover (type: %v), but is trying to prove a chunk", p.cfg.ProofType) } - chunkTraceByt, err := json.Marshal(chunkTrace.BlockTraces) + chunkTraceByt, err := json.Marshal(chunkTrace) if err != nil { return nil, err } - proofByt, err := p.proveChunk(chunkTraceByt, chunkTrace.PrevLastAppliedL1Block, chunkTrace.LastAppliedL1Block, chunkTrace.L1BlockRangeHash) + proofByt, err := p.proveChunk(chunkTraceByt) if err != nil { return nil, err } @@ -208,20 +208,12 @@ func (p *ProverCore) proveBatch(chunkInfosByt []byte, chunkProofsByt []byte) ([] return result.Message, nil } -func (p *ProverCore) proveChunk(chunkTraceByt []byte, prevLastAppliedL1Block uint64, lastAppliedL1Block uint64, l1BlockRangeHash []byte) ([]byte, error) { - chunkTraceBytStr := C.CString(string(chunkTraceByt)) - defer C.free(unsafe.Pointer(chunkTraceBytStr)) - - l1BlockRangeHashStr := C.CString(string(l1BlockRangeHash)) - defer C.free(unsafe.Pointer(l1BlockRangeHashStr)) +func (p *ProverCore) proveChunk(chunkTraceByt []byte) ([]byte, error) { + tracesStr := C.CString(string(chunkTraceByt)) + defer C.free(unsafe.Pointer(tracesStr)) log.Info("Start to create chunk proof ...") - cProof := C.gen_chunk_proof( - tracesStr, - C.uint64_t(prevLastAppliedL1Block), - C.uint64_t(lastAppliedL1Block), - l1BlockRangeHashStr, - ) + cProof := C.gen_chunk_proof(tracesStr) defer C.free_c_chars(cProof) log.Info("Finish creating chunk proof!") From 82ef0456d19eb601d3291bf8053f259ecbdeddc1 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Wed, 10 Jan 2024 15:24:11 +0200 Subject: [PATCH 46/59] fix: try fix zktrie build issue on linux --- common/libzkp/impl/Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/libzkp/impl/Cargo.lock b/common/libzkp/impl/Cargo.lock index d1174c0bda..5c0880929a 100644 --- a/common/libzkp/impl/Cargo.lock +++ b/common/libzkp/impl/Cargo.lock @@ -4185,7 +4185,7 @@ dependencies = [ [[package]] name = "zktrie" version = "0.2.0" 
-source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.6#83318659773604fa565e2ebeb810a6d3746f0af4" +source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.6#82dc6591add75a307d5004801f70276c22bf843f" dependencies = [ "gobuild 0.1.0-alpha.2 (registry+https://github.com/rust-lang/crates.io-index)", ] From b314ee3828b14627b28460bb99943307ffdd0c16 Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 10 Jan 2024 16:17:30 +0200 Subject: [PATCH 47/59] fix(coordinator/orm/block): missing query last applied l1 block column --- coordinator/internal/orm/l2_block.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/coordinator/internal/orm/l2_block.go b/coordinator/internal/orm/l2_block.go index dcbe20f4b1..06b1752233 100644 --- a/coordinator/internal/orm/l2_block.go +++ b/coordinator/internal/orm/l2_block.go @@ -56,7 +56,7 @@ func (*L2Block) TableName() string { func (o *L2Block) GetL2BlocksByChunkHash(ctx context.Context, chunkHash string) ([]*types.WrappedBlock, error) { db := o.db.WithContext(ctx) db = db.Model(&L2Block{}) - db = db.Select("header, transactions, withdraw_root, row_consumption") + db = db.Select("header, transactions, withdraw_root, row_consumption, last_applied_l1_block") db = db.Where("chunk_hash = ?", chunkHash) db = db.Order("number ASC") From 19ddb6616d269d912f5ba25d2c04ec461a402ac2 Mon Sep 17 00:00:00 2001 From: failfmi Date: Wed, 10 Jan 2024 19:35:32 +0200 Subject: [PATCH 48/59] build(coordinator_cron): explicit amd64 platform --- build/dockerfiles/coordinator-cron.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build/dockerfiles/coordinator-cron.Dockerfile b/build/dockerfiles/coordinator-cron.Dockerfile index ae1d700638..67fa9fb4c0 100644 --- a/build/dockerfiles/coordinator-cron.Dockerfile +++ b/build/dockerfiles/coordinator-cron.Dockerfile @@ -19,7 +19,7 @@ RUN --mount=target=. 
\ cd /src/coordinator/cmd/cron/ && go build -v -p 4 -o /bin/coordinator_cron # Pull coordinator into a second stage deploy alpine container -FROM alpine:latest +FROM --platform=linux/amd64 alpine:latest COPY --from=builder /bin/coordinator_cron /bin/ ENTRYPOINT ["coordinator_cron"] \ No newline at end of file From a7adb1258584df5bd50d986a6532772c0904e376 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Thu, 11 Jan 2024 09:44:58 +0200 Subject: [PATCH 49/59] fix(libzkp): add fix chunk trace --- common/libzkp/impl/src/chunk.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/common/libzkp/impl/src/chunk.rs b/common/libzkp/impl/src/chunk.rs index b5a2957e82..ddc2120b96 100644 --- a/common/libzkp/impl/src/chunk.rs +++ b/common/libzkp/impl/src/chunk.rs @@ -10,7 +10,7 @@ use prover::{ consts::CHUNK_VK_FILENAME, utils::init_env_and_log, zkevm::{Prover, Verifier}, - BlockTrace, ChunkProof, + ChunkProof, ChunkTrace, }; use std::{cell::OnceCell, env, ptr::null}; @@ -69,7 +69,7 @@ pub unsafe extern "C" fn get_chunk_vk() -> *const c_char { pub unsafe extern "C" fn gen_chunk_proof(chunk_trace: *const c_char) -> *const c_char { let proof_result: Result, String> = panic_catch(|| { let chunk_trace = c_char_to_vec(chunk_trace); - let chunk_trace = serde_json::from_slice::>(&chunk_trace) + let chunk_trace = serde_json::from_slice::(&chunk_trace) .map_err(|e| format!("failed to deserialize block traces: {e:?}"))?; let proof = PROVER From 6bf507b75d13fe8ed7e935efd430c67f6012157a Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Thu, 11 Jan 2024 10:15:54 +0200 Subject: [PATCH 50/59] Revert "fix: try fix zktrie build issue on linux" This reverts commit 82ef0456d19eb601d3291bf8053f259ecbdeddc1. --- common/libzkp/impl/Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/libzkp/impl/Cargo.lock b/common/libzkp/impl/Cargo.lock index 5c0880929a..d1174c0bda 100644 --- a/common/libzkp/impl/Cargo.lock +++ b/common/libzkp/impl/Cargo.lock @@ -4185,7 +4185,7 @@ dependencies = [ [[package]] name = "zktrie" version = "0.2.0" -source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.6#82dc6591add75a307d5004801f70276c22bf843f" +source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.6#83318659773604fa565e2ebeb810a6d3746f0af4" dependencies = [ "gobuild 0.1.0-alpha.2 (registry+https://github.com/rust-lang/crates.io-index)", ] From 2729a1376920cb692d43f52e32e5a92afec84d60 Mon Sep 17 00:00:00 2001 From: failfmi Date: Thu, 11 Jan 2024 15:50:21 +0200 Subject: [PATCH 51/59] feat(contracts/l1blocks): switch appendBlockhashes msg.sender modifier --- contracts/src/L2/L1Blocks.sol | 5 ++++- contracts/src/test/L1Blocks.t.sol | 9 +++++---- go.work.sum | 2 ++ rollup/go.mod | 2 +- 4 files changed, 12 insertions(+), 6 deletions(-) diff --git a/contracts/src/L2/L1Blocks.sol b/contracts/src/L2/L1Blocks.sol index 8e4a3e5250..9708793615 100644 --- a/contracts/src/L2/L1Blocks.sol +++ b/contracts/src/L2/L1Blocks.sol @@ -17,7 +17,10 @@ contract L1Blocks { 0x46b6ca24459c6768b3d8d5d90e9189b00e3ebb5fe38fb16cb9819816d9fe1c2d; modifier onlySequencer() { - require(msg.sender == address(0), "L1Blocks: caller is not the sequencer"); + require( + msg.sender == address(0x5300000000000000000000000000000000000005), + "L1Blocks: caller is not the sequencer" + ); _; } diff --git a/contracts/src/test/L1Blocks.t.sol b/contracts/src/test/L1Blocks.t.sol index 1793e4141c..d7ed8765f0 100644 --- a/contracts/src/test/L1Blocks.t.sol +++ b/contracts/src/test/L1Blocks.t.sol @@ -10,6 +10,7 @@ 
contract L1BlocksTest is DSTestPlus { L1Blocks private l1Blocks; uint32 private blockHashesSize; uint64 private firstAppliedL1Block = 1; + address private sequencer = 0x5300000000000000000000000000000000000005; function setUp() public { l1Blocks = new L1Blocks(firstAppliedL1Block); @@ -39,7 +40,7 @@ contract L1BlocksTest is DSTestPlus { } } - hevm.startPrank(address(0)); + hevm.startPrank(sequencer); l1Blocks.appendBlockhashes(hashes); hevm.stopPrank(); @@ -57,7 +58,7 @@ contract L1BlocksTest is DSTestPlus { bytes32[] memory hashes = new bytes32[](1); hashes[0] = keccak256(abi.encodePacked(lowerBound)); - hevm.startPrank(address(0)); + hevm.startPrank(sequencer); l1Blocks.appendBlockhashes(hashes); hevm.stopPrank(); @@ -74,7 +75,7 @@ contract L1BlocksTest is DSTestPlus { bytes32[] memory hashes = new bytes32[](1); hashes[0] = keccak256(abi.encodePacked(upperBound)); - hevm.startPrank(address(0)); + hevm.startPrank(sequencer); l1Blocks.appendBlockhashes(hashes); hevm.stopPrank(); @@ -97,7 +98,7 @@ contract L1BlocksTest is DSTestPlus { } function testGetL1BlockHashOverwrittenRingMapSuccess() external { - hevm.startPrank(address(0)); + hevm.startPrank(sequencer); uint64 lowerBound = 0; uint8 times = 3; diff --git a/go.work.sum b/go.work.sum index ed13bf7b42..10ca8b67e1 100644 --- a/go.work.sum +++ b/go.work.sum @@ -530,6 +530,8 @@ github.com/limechain/scroll-go-ethereum v0.0.0-20231128150717-8a4a05ac3f85 h1:kj github.com/limechain/scroll-go-ethereum v0.0.0-20231128150717-8a4a05ac3f85/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= github.com/limechain/scroll-go-ethereum v0.0.0-20231223062238-eaa38f09ce93 h1:sjxxhJX/ZA3vIgwxScgDyAEEoLv+Bh1NrrsJ0cGmaZw= github.com/limechain/scroll-go-ethereum v0.0.0-20231223062238-eaa38f09ce93/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= +github.com/limechain/scroll-go-ethereum v0.0.0-20240111134224-79313f78bdcb h1:cBgnLqPgdilOqsOKz3aYlTuawDReQuuGE/5HTHUzmSo= +github.com/limechain/scroll-go-ethereum v0.0.0-20240111134224-79313f78bdcb/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= diff --git a/rollup/go.mod b/rollup/go.mod index 905e2b8ffa..a56c069105 100644 --- a/rollup/go.mod +++ b/rollup/go.mod @@ -87,4 +87,4 @@ require ( gopkg.in/yaml.v3 v3.0.1 // indirect ) -replace github.com/scroll-tech/go-ethereum => github.com/limechain/scroll-go-ethereum eaa38f09ce932a52bd29a665e81526d447af0b95 +replace github.com/scroll-tech/go-ethereum => github.com/limechain/scroll-go-ethereum 79313f78bdcb3b5101459752de1926999db9eb2d From 97f3c9df6f27632a45af3774718a9c122a30506a Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Tue, 16 Jan 2024 10:36:23 +0200 Subject: [PATCH 52/59] chore: upgrade libzkp --- common/libzkp/impl/Cargo.lock | 504 ++++++++++++------------------ common/libzkp/impl/Cargo.toml | 21 +- common/libzkp/impl/rust-toolchain | 2 +- common/libzkp/impl/src/batch.rs | 12 +- common/libzkp/impl/src/lib.rs | 1 - common/libzkp/interface/libzkp.h | 2 +- prover/core/prover.go | 15 +- 7 files changed, 223 insertions(+), 334 deletions(-) diff --git a/common/libzkp/impl/Cargo.lock b/common/libzkp/impl/Cargo.lock index d1174c0bda..1dd10d5c67 100644 --- a/common/libzkp/impl/Cargo.lock +++ b/common/libzkp/impl/Cargo.lock @@ -31,7 +31,7 @@ 
dependencies = [ [[package]] name = "aggregator" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ "ark-std", "env_logger 0.10.0", @@ -39,7 +39,7 @@ dependencies = [ "ethers-core", "halo2_proofs", "hex", - "itertools", + "itertools 0.11.0", "log", "rand", "serde", @@ -279,16 +279,6 @@ dependencies = [ "constant_time_eq", ] -[[package]] -name = "block-buffer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab" -dependencies = [ - "arrayref", - "byte-tools", -] - [[package]] name = "block-buffer" version = "0.9.0" @@ -296,7 +286,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ "block-padding", - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -305,7 +295,7 @@ version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -320,7 +310,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f5353f36341f7451062466f0b755b96ac3a9547e4d7f6b70d603fc721a7d7896" dependencies = [ - "sha2 0.10.7", + "sha2", "tinyvec", ] @@ -333,7 +323,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" [[package]] name = "bus-mapping" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ "eth-types", "ethers-core", @@ -342,21 +332,18 @@ dependencies = [ "gadgets", "halo2_proofs", "hex", - "itertools", - "keccak256", - "lazy_static", + "itertools 0.11.0", "log", "mock", "mpt-zktrie", "num", - "once_cell", "poseidon-circuit", "rand", "revm-precompile", "serde", "serde_json", - "strum", - "strum_macros", + "strum 0.25.0", + "strum_macros 0.25.3", ] [[package]] @@ -365,12 +352,6 @@ version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" -[[package]] -name = "byte-tools" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" - [[package]] name = "byteorder" version = "1.4.3" @@ -438,10 +419,10 @@ dependencies = [ "bs58", "coins-core", "digest 0.10.7", - "hmac 0.12.1", + "hmac", "k256 0.13.1", "serde", - "sha2 0.10.7", + "sha2", "thiserror", ] @@ -453,11 +434,11 @@ checksum = "3db8fba409ce3dc04f7d804074039eb68b960b0829161f8e06c95fea3f122528" dependencies = [ "bitvec", "coins-bip32", - "hmac 0.12.1", + "hmac", "once_cell", "pbkdf2 0.12.2", "rand", - "sha2 0.10.7", + "sha2", "thiserror", ] @@ -471,12 +452,12 @@ dependencies = [ "bech32", "bs58", "digest 0.10.7", - "generic-array 0.14.7", + "generic-array", "hex", "ripemd", "serde", "serde_derive", - "sha2 0.10.7", + "sha2", "sha3 0.10.8", "thiserror", ] @@ -593,7 +574,7 @@ version = "0.4.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ - "generic-array 0.14.7", + "generic-array", "rand_core", "subtle", "zeroize", @@ -605,7 +586,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "740fe28e594155f10cfc383984cbefd529d7396050557148f79cb0f621204124" dependencies = [ - "generic-array 0.14.7", + "generic-array", "rand_core", "subtle", "zeroize", @@ -617,20 +598,10 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array 0.14.7", + "generic-array", "typenum", ] -[[package]] -name = "crypto-mac" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" -dependencies = [ - "generic-array 0.14.7", - "subtle", -] - [[package]] name = "ctr" version = "0.9.2" @@ -724,22 +695,13 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "digest" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90" -dependencies = [ - "generic-array 0.9.1", -] - [[package]] name = "digest" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -803,7 +765,7 @@ dependencies = [ "der 0.6.1", "digest 0.10.7", "ff 0.12.1", - "generic-array 0.14.7", + "generic-array", "group 0.12.1", "rand_core", "sec1 0.3.0", @@ -821,7 +783,7 @@ dependencies = [ "crypto-bigint 0.5.3", "digest 0.10.7", "ff 0.13.0", - "generic-array 0.14.7", + "generic-array", "group 0.13.0", "pkcs8", "rand_core", @@ -868,19 +830,6 @@ dependencies = [ "syn 2.0.27", ] -[[package]] -name = "env_logger" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" -dependencies = [ - "atty", - "humantime", - "log", - "regex", - "termcolor", -] - [[package]] name = "env_logger" version = "0.9.3" @@ -944,13 +893,13 @@ dependencies = [ "ctr", "digest 0.10.7", "hex", - "hmac 0.12.1", + "hmac", "pbkdf2 0.11.0", "rand", "scrypt", "serde", "serde_json", - "sha2 0.10.7", + "sha2", "sha3 0.10.8", "thiserror", "uuid", @@ -959,26 +908,24 @@ dependencies = [ [[package]] name = "eth-types" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ "ethers-core", "ethers-signers", + "halo2-base", "halo2_proofs", "hex", - "itertools", - "lazy_static", - "libsecp256k1", + "itertools 0.11.0", "num", "num-bigint", - "once_cell", "poseidon-circuit", "regex", "serde", "serde_json", "serde_with", "sha3 0.10.8", - "strum", - "strum_macros", + "strum 0.25.0", + "strum_macros 0.25.3", "subtle", "uint", ] @@ -1034,14 +981,14 @@ dependencies = [ [[package]] name = "ethers-core" version = "2.0.7" -source = "git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7#e32dfd62e7cdec31160b91c5a646883594a586ba" +source = 
"git+https://github.com/LimeChain/scroll-ethers-rs.git?branch=LimeChain/l1-block-hashes-poc#40e9bf536667c29971bb6bea79590f13a3f670df" dependencies = [ "arrayvec", "bytes", "chrono", "elliptic-curve 0.13.5", "ethabi", - "generic-array 0.14.7", + "generic-array", "hex", "k256 0.13.1", "num_enum 0.6.1", @@ -1050,7 +997,7 @@ dependencies = [ "rlp", "serde", "serde_json", - "strum", + "strum 0.24.1", "tempfile", "thiserror", "tiny-keccak", @@ -1060,8 +1007,7 @@ dependencies = [ [[package]] name = "ethers-providers" version = "2.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56b498fd2a6c019d023e43e83488cd1fb0721f299055975aa6bac8dbf1e95f2c" +source = "git+https://github.com/LimeChain/scroll-ethers-rs.git?branch=LimeChain/l1-block-hashes-poc#40e9bf536667c29971bb6bea79590f13a3f670df" dependencies = [ "async-trait", "auto_impl", @@ -1097,8 +1043,7 @@ dependencies = [ [[package]] name = "ethers-signers" version = "2.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02c4b7e15f212fa7cc2e1251868320221d4ff77a3d48068e69f47ce1c491df2d" +source = "git+https://github.com/LimeChain/scroll-ethers-rs.git?branch=LimeChain/l1-block-hashes-poc#40e9bf536667c29971bb6bea79590f13a3f670df" dependencies = [ "async-trait", "coins-bip32", @@ -1108,7 +1053,7 @@ dependencies = [ "ethers-core", "hex", "rand", - "sha2 0.10.7", + "sha2", "thiserror", "tracing", ] @@ -1116,7 +1061,7 @@ dependencies = [ [[package]] name = "external-tracer" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ "eth-types", "geth-utils", @@ -1137,7 +1082,6 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ - "bitvec", "rand_core", "subtle", ] @@ -1148,6 +1092,7 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ + "bitvec", "rand_core", "subtle", ] @@ -1296,22 +1241,12 @@ dependencies = [ [[package]] name = "gadgets" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ - "digest 0.7.6", "eth-types", "halo2_proofs", - "sha3 0.7.3", - "strum", -] - -[[package]] -name = "generic-array" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d00328cedcac5e81c683e5620ca6a30756fc23027ebf9bff405c0e8da1fbb7e" -dependencies = [ - "typenum", + "sha3 0.10.8", + "strum 0.25.0", ] [[package]] @@ -1328,10 +1263,10 @@ dependencies = [ [[package]] name = "geth-utils" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ - "env_logger 0.9.3", - "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)", + "env_logger 0.10.0", + "gobuild", "log", ] @@ -1389,16 
+1324,7 @@ dependencies = [ [[package]] name = "gobuild" version = "0.1.0-alpha.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e156a4ddbf3deb5e8116946c111413bd9a5679bdc1536c78a60618a7a9ac9e" -dependencies = [ - "cc", -] - -[[package]] -name = "gobuild" -version = "0.1.0-alpha.2" -source = "git+https://github.com/scroll-tech/gobuild.git#8b84111fc3b58e2134e4794a06d1f199412cf2b0" +source = "git+https://github.com/scroll-tech/gobuild.git#24935c2b8f677841f22acd6710957621bb294e0e" dependencies = [ "cc", ] @@ -1447,11 +1373,11 @@ dependencies = [ [[package]] name = "halo2-base" version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.5#70588177930400361c731659b15b2ab3f29f7784" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#b7c53bb7456063936f4ca6df8fa8e751d9c17d85" dependencies = [ - "ff 0.12.1", + "ff 0.13.0", "halo2_proofs", - "itertools", + "itertools 0.10.5", "num-bigint", "num-integer", "num-traits", @@ -1462,12 +1388,12 @@ dependencies = [ [[package]] name = "halo2-ecc" version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?tag=v0.1.5#70588177930400361c731659b15b2ab3f29f7784" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#b7c53bb7456063936f4ca6df8fa8e751d9c17d85" dependencies = [ - "ff 0.12.1", - "group 0.12.1", + "ff 0.13.0", + "group 0.13.0", "halo2-base", - "itertools", + "itertools 0.10.5", "num-bigint", "num-integer", "num-traits", @@ -1481,7 +1407,7 @@ dependencies = [ [[package]] name = "halo2-gate-generator" version = "0.1.0" -source = "git+https://github.com/scroll-tech/halo2gategen.git#35b137de2f71c37dfbd236842b868013c46739d1" +source = "git+https://github.com/scroll-tech/halo2gategen.git#8ccf462e1eff4ed0e602d7ba19771b2c53dee0e3" dependencies = [ "halo2_proofs", "lazy_static", @@ -1489,46 +1415,64 @@ dependencies = [ "rand", "serde", "serde_json", - "strum", - "strum_macros", + "strum 0.24.1", + "strum_macros 0.24.3", "subtle", ] [[package]] name = "halo2-mpt-circuits" version = "0.1.0" -source = "git+https://github.com/scroll-tech/mpt-circuit.git?tag=v0.7.0#578c210ceb88d3c143ee2a013ad836d19285d9c1" +source = "git+https://github.com/scroll-tech/mpt-circuit.git?branch=v0.7#32ab964ff065ee6a0ccc63590b9db73238c70b81" dependencies = [ + "env_logger 0.10.0", "ethers-core", "halo2_proofs", "hex", - "itertools", + "itertools 0.10.5", "lazy_static", "log", "num-bigint", "num-traits", "poseidon-circuit", "rand", + "rand_chacha", "serde", "serde_json", - "strum", - "strum_macros", + "strum 0.24.1", + "strum_macros 0.24.3", "thiserror", ] +[[package]] +name = "halo2_gadgets" +version = "0.2.0" +source = "git+https://github.com/scroll-tech/halo2.git?branch=v1.0#04d8dc09bd4df542feccac85e34dff4b38a467d3" +dependencies = [ + "arrayvec", + "bitvec", + "ff 0.13.0", + "group 0.13.0", + "halo2_proofs", + "halo2curves 0.3.2", + "lazy_static", + "rand", + "subtle", + "uint", +] + [[package]] name = "halo2_proofs" version = "0.2.0" -source = "git+https://github.com/scroll-tech/halo2.git?branch=develop#e3fe25eadd714fd991f35190d17ff0b8fb031188" +source = "git+https://github.com/scroll-tech/halo2.git?branch=v1.0#04d8dc09bd4df542feccac85e34dff4b38a467d3" dependencies = [ "ark-std", "blake2b_simd", "cfg-if 0.1.10", "crossbeam", - "env_logger 0.8.4", - "ff 0.12.1", - "group 0.12.1", - "halo2curves", + "ff 0.13.0", + "group 0.13.0", + "halo2curves 0.1.0", "log", "num-bigint", "num-integer", @@ -1542,11 +1486,13 @@ dependencies = [ [[package]] name = "halo2curves" -version = "0.3.1" 
-source = "git+https://github.com/scroll-tech/halo2curves.git?branch=0.3.1-derive-serde#969f1e44d9713ee4cd552563bd0c762c5d53b56e" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6b1142bd1059aacde1b477e0c80c142910f1ceae67fc619311d6a17428007ab" dependencies = [ - "ff 0.12.1", - "group 0.12.1", + "blake2b_simd", + "ff 0.13.0", + "group 0.13.0", "lazy_static", "num-bigint", "num-traits", @@ -1555,20 +1501,27 @@ dependencies = [ "rand", "rand_core", "serde", + "serde_arrays", "static_assertions", "subtle", ] [[package]] -name = "halo2wrong" -version = "0.1.0" -source = "git+https://github.com/scroll-tech/halo2wrong.git?branch=halo2-ecc-snark-verifier-0323#939d679cb16abf0e820bd606248661e400328afa" +name = "halo2curves" +version = "0.3.2" +source = "git+https://github.com/privacy-scaling-explorations/halo2curves?tag=0.3.2#9f5c50810bbefe779ee5cf1d852b2fe85dc35d5e" dependencies = [ - "group 0.12.1", - "halo2_proofs", + "ff 0.13.0", + "group 0.13.0", + "lazy_static", "num-bigint", - "num-integer", "num-traits", + "pasta_curves", + "paste", + "rand", + "rand_core", + "static_assertions", + "subtle", ] [[package]] @@ -1634,16 +1587,6 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ebdb29d2ea9ed0083cd8cece49bbd968021bd99b0849edb4a9a7ee0fdf6a4e0" -[[package]] -name = "hmac" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" -dependencies = [ - "crypto-mac", - "digest 0.9.0", -] - [[package]] name = "hmac" version = "0.12.1" @@ -1653,17 +1596,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "hmac-drbg" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" -dependencies = [ - "digest 0.9.0", - "generic-array 0.14.7", - "hmac 0.8.1", -] - [[package]] name = "http" version = "0.2.9" @@ -1845,7 +1777,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -1883,6 +1815,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.9" @@ -1907,7 +1848,7 @@ dependencies = [ "cfg-if 1.0.0", "ecdsa 0.14.8", "elliptic-curve 0.12.3", - "sha2 0.10.7", + "sha2", "sha3 0.10.8", ] @@ -1921,7 +1862,7 @@ dependencies = [ "ecdsa 0.16.8", "elliptic-curve 0.13.5", "once_cell", - "sha2 0.10.7", + "sha2", "signature 2.1.0", ] @@ -1937,13 +1878,12 @@ dependencies = [ [[package]] name = "keccak256" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ - "env_logger 0.9.3", + "env_logger 0.10.0", "eth-types", "halo2_proofs", - "itertools", - "lazy_static", + "itertools 0.11.0", "log", "num-bigint", "num-traits", @@ -1964,54 +1904,6 @@ version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" -[[package]] -name = "libsecp256k1" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95b09eff1b35ed3b33b877ced3a691fc7a481919c7e29c53c906226fcf55e2a1" -dependencies = [ - "arrayref", - "base64 0.13.1", - "digest 0.9.0", - "hmac-drbg", - "libsecp256k1-core", - "libsecp256k1-gen-ecmult", - "libsecp256k1-gen-genmult", - "rand", - "serde", - "sha2 0.9.9", - "typenum", -] - -[[package]] -name = "libsecp256k1-core" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5be9b9bb642d8522a44d533eab56c16c738301965504753b03ad1de3425d5451" -dependencies = [ - "crunchy", - "digest 0.9.0", - "subtle", -] - -[[package]] -name = "libsecp256k1-gen-ecmult" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3038c808c55c87e8a172643a7d87187fc6c4174468159cb3090659d55bcb4809" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "libsecp256k1-gen-genmult" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3db8d6ba2cec9eacc40e6e8ccc98931840301f1006e95647ceb2dd5c3aa06f7c" -dependencies = [ - "libsecp256k1-core", -] - [[package]] name = "linux-raw-sys" version = "0.4.3" @@ -2060,20 +1952,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "maingate" -version = "0.1.0" -source = "git+https://github.com/scroll-tech/halo2wrong.git?branch=halo2-ecc-snark-verifier-0323#939d679cb16abf0e820bd606248661e400328afa" -dependencies = [ - "group 0.12.1", - "halo2wrong", - "num-bigint", - "num-integer", - "num-traits", - "rand", - "subtle", -] - [[package]] name = "memchr" version = "2.5.0" @@ -2118,31 +1996,30 @@ dependencies = [ [[package]] name = "misc-precompiled-circuit" version = "0.1.0" -source = "git+https://github.com/scroll-tech/misc-precompiled-circuit.git?tag=v0.1.0#f647341f9951f5c2399035728d4f6765564e2e02" +source = "git+https://github.com/scroll-tech/misc-precompiled-circuit.git?branch=main#f46cf8fd0072e5531315739b20b5248f4bd2caac" dependencies = [ "halo2-gate-generator", "halo2_proofs", - "lazy_static", "num-bigint", "rand", "serde", "serde_json", - "strum", - "strum_macros", + "strum 0.25.0", + "strum_macros 0.25.3", "subtle", ] [[package]] name = "mock" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ "eth-types", + "ethabi", "ethers-core", "ethers-signers", "external-tracer", - "itertools", - "lazy_static", + "itertools 0.11.0", "log", "rand", "rand_chacha", @@ -2151,13 +2028,12 @@ dependencies = [ [[package]] name = "mpt-zktrie" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ "eth-types", "halo2-mpt-circuits", "halo2_proofs", "hex", - "lazy_static", "log", "num-bigint", "poseidon-circuit", @@ -2390,13 +2266,13 @@ dependencies = [ [[package]] name = "pasta_curves" -version = "0.4.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc65faf8e7313b4b1fbaa9f7ca917a0eed499a9663be71477f87993604341d8" +checksum = 
"d3e57598f73cc7e1b2ac63c79c517b31a0877cd7c402cdcaa311b5208de7a095" dependencies = [ "blake2b_simd", - "ff 0.12.1", - "group 0.12.1", + "ff 0.13.0", + "group 0.13.0", "lazy_static", "rand", "static_assertions", @@ -2425,7 +2301,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" dependencies = [ "digest 0.10.7", - "hmac 0.12.1", + "hmac", ] [[package]] @@ -2489,19 +2365,19 @@ dependencies = [ [[package]] name = "poseidon" version = "0.2.0" -source = "git+https://github.com/scroll-tech/poseidon.git?branch=scroll-dev-0220#2fb4a2385bada39b50dce12fe50cb80d2fd33476" +source = "git+https://github.com/scroll-tech/poseidon.git?branch=main#5787dd3d2ce7a9e9601a035c396ac0c03449b54d" dependencies = [ - "group 0.12.1", - "halo2curves", + "halo2curves 0.1.0", "subtle", ] [[package]] name = "poseidon-circuit" version = "0.1.0" -source = "git+https://github.com/scroll-tech/poseidon-circuit.git?branch=scroll-dev-0901#69524f42bdc55c581088c2fe64c2ab9a2921146b" +source = "git+https://github.com/scroll-tech/poseidon-circuit.git?branch=scroll-dev-1201#c6f058bcf3bb0c7933d1979563c414f5cc480f25" dependencies = [ "bitvec", + "ff 0.13.0", "halo2_proofs", "lazy_static", "log", @@ -2582,7 +2458,7 @@ dependencies = [ [[package]] name = "prover" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ "aggregator", "anyhow", @@ -2596,19 +2472,18 @@ dependencies = [ "git-version", "halo2_proofs", "hex", - "itertools", + "itertools 0.11.0", "log", "log4rs", "mpt-zktrie", "num-bigint", - "once_cell", "rand", "rand_xorshift", "serde", "serde_derive", "serde_json", "serde_stacker", - "sha2 0.10.7", + "sha2", "snark-verifier", "snark-verifier-sdk", "zkevm-circuits", @@ -2813,7 +2688,7 @@ dependencies = [ "revm-primitives", "ripemd", "secp256k1 0.26.0", - "sha2 0.10.7", + "sha2", "sha3 0.10.8", "substrate-bn", ] @@ -2849,7 +2724,7 @@ dependencies = [ "primitive-types", "ripemd", "secp256k1 0.24.3", - "sha2 0.10.7", + "sha2", "sha3 0.10.8", "substrate-bn", ] @@ -2861,7 +2736,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" dependencies = [ "crypto-bigint 0.4.9", - "hmac 0.12.1", + "hmac", "zeroize", ] @@ -2871,7 +2746,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ - "hmac 0.12.1", + "hmac", "subtle", ] @@ -3077,10 +2952,10 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f9e24d2b632954ded8ab2ef9fea0a0c769ea56ea98bddbafbad22caeeadf45d" dependencies = [ - "hmac 0.12.1", + "hmac", "pbkdf2 0.11.0", "salsa20", - "sha2 0.10.7", + "sha2", ] [[package]] @@ -3101,7 +2976,7 @@ checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" dependencies = [ "base16ct 0.1.1", "der 0.6.1", - "generic-array 0.14.7", + "generic-array", "subtle", "zeroize", ] @@ -3114,7 +2989,7 @@ checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ "base16ct 0.2.0", "der 0.7.8", - "generic-array 0.14.7", + "generic-array", "pkcs8", "subtle", "zeroize", @@ -3183,6 +3058,15 @@ dependencies = [ 
"serde_derive", ] +[[package]] +name = "serde_arrays" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38636132857f68ec3d5f3eb121166d2af33cb55174c4d5ff645db6165cbef0fd" +dependencies = [ + "serde", +] + [[package]] name = "serde_derive" version = "1.0.178" @@ -3260,19 +3144,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "sha2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if 1.0.0", - "cpufeatures", - "digest 0.9.0", - "opaque-debug", -] - [[package]] name = "sha2" version = "0.10.7" @@ -3284,18 +3155,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "sha3" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b64dcef59ed4290b9fb562b53df07f564690d6539e8ecdd4728cf392477530bc" -dependencies = [ - "block-buffer 0.3.3", - "byte-tools", - "digest 0.7.6", - "keccak", -] - [[package]] name = "sha3" version = "0.9.1" @@ -3356,14 +3215,14 @@ checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "snark-verifier" version = "0.1.0" -source = "git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.5#bc1d39ae31f3fe520c51dd150f0fefaf9653c465" +source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#12c1121a855564936a267b37bc9c27306de3eb3b" dependencies = [ "bytes", "ethereum-types", "halo2-base", "halo2-ecc", "hex", - "itertools", + "itertools 0.10.5", "lazy_static", "num-bigint", "num-integer", @@ -3380,14 +3239,14 @@ dependencies = [ [[package]] name = "snark-verifier-sdk" version = "0.0.1" -source = "git+https://github.com/scroll-tech/snark-verifier?tag=v0.1.5#bc1d39ae31f3fe520c51dd150f0fefaf9653c465" +source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#12c1121a855564936a267b37bc9c27306de3eb3b" dependencies = [ "bincode", - "env_logger 0.10.0", "ethereum-types", + "ff 0.13.0", "halo2-base", "hex", - "itertools", + "itertools 0.10.5", "lazy_static", "log", "num-bigint", @@ -3457,9 +3316,15 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros", + "strum_macros 0.24.3", ] +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" + [[package]] name = "strum_macros" version = "0.24.3" @@ -3473,6 +3338,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.25.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.27", +] + [[package]] name = "substrate-bn" version = "0.6.0" @@ -4125,32 +4003,31 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" [[package]] name = "zkevm-circuits" version = "0.1.0" -source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.9.7#2055cc0bb970aa28d597c945b6078e2469af8862" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" dependencies = [ "array-init", "bus-mapping", "either", - "env_logger 
0.9.3", + "env_logger 0.10.0", "eth-types", + "ethabi", "ethers-core", "ethers-signers", + "ff 0.13.0", "gadgets", "halo2-base", "halo2-ecc", + "halo2_gadgets", "halo2_proofs", "hex", - "itertools", + "itertools 0.11.0", "keccak256", - "lazy_static", - "libsecp256k1", "log", - "maingate", "misc-precompiled-circuit", "mock", "mpt-zktrie", "num", "num-bigint", - "once_cell", "poseidon-circuit", "rand", "rand_chacha", @@ -4161,8 +4038,8 @@ dependencies = [ "sha3 0.10.8", "snark-verifier", "snark-verifier-sdk", - "strum", - "strum_macros", + "strum 0.25.0", + "strum_macros 0.25.3", "subtle", ] @@ -4172,6 +4049,7 @@ version = "0.1.0" dependencies = [ "base64 0.13.1", "env_logger 0.9.3", + "gobuild", "halo2_proofs", "libc", "log", @@ -4185,7 +4063,17 @@ dependencies = [ [[package]] name = "zktrie" version = "0.2.0" -source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.6#83318659773604fa565e2ebeb810a6d3746f0af4" +source = "git+https://github.com/scroll-tech/zktrie.git?tag=v0.7.1#a12f2f262ad3e82301e39ecdf9bfe235befc7074" dependencies = [ - "gobuild 0.1.0-alpha.2 (registry+https://github.com/rust-lang/crates.io-index)", + "gobuild", ] + +[[patch.unused]] +name = "ethers" +version = "2.0.7" +source = "git+https://github.com/LimeChain/scroll-ethers-rs.git?branch=LimeChain/l1-block-hashes-poc#40e9bf536667c29971bb6bea79590f13a3f670df" + +[[patch.unused]] +name = "ethers-etherscan" +version = "2.0.7" +source = "git+https://github.com/LimeChain/scroll-ethers-rs.git?branch=LimeChain/l1-block-hashes-poc#40e9bf536667c29971bb6bea79590f13a3f670df" diff --git a/common/libzkp/impl/Cargo.toml b/common/libzkp/impl/Cargo.toml index 5402eb85f6..e5a953e6b2 100644 --- a/common/libzkp/impl/Cargo.toml +++ b/common/libzkp/impl/Cargo.toml @@ -8,19 +8,22 @@ edition = "2021" crate-type = ["cdylib"] [patch.crates-io] -ethers-core = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc", features = ["scroll"] } +ethers-core = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc" } +ethers-providers = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc" } +ethers = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc" } +ethers-etherscan = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc" } +ethers-signers = { git = "https://github.com/LimeChain/scroll-ethers-rs.git", branch = "LimeChain/l1-block-hashes-poc" } +gobuild = { git = "https://github.com/scroll-tech/gobuild.git" } [patch."https://github.com/privacy-scaling-explorations/halo2.git"] -halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" } +halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.0" } [patch."https://github.com/privacy-scaling-explorations/poseidon.git"] -poseidon = { git = "https://github.com/scroll-tech/poseidon.git", branch = "scroll-dev-0220" } -[patch."https://github.com/privacy-scaling-explorations/halo2wrong.git"] -halo2wrong = { git = "https://github.com/scroll-tech/halo2wrong.git", branch = "halo2-ecc-snark-verifier-0323" } -maingate = { git = "https://github.com/scroll-tech/halo2wrong", branch = "halo2-ecc-snark-verifier-0323" } -[patch."https://github.com/privacy-scaling-explorations/halo2curves.git"] -halo2curves = { git = "https://github.com/scroll-tech/halo2curves.git", branch = "0.3.1-derive-serde" } +poseidon = { git = 
"https://github.com/scroll-tech/poseidon.git", branch = "main" } + +[build-dependencies] +gobuild = { git = "https://github.com/scroll-tech/gobuild.git" } [dependencies] -halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" } +halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.0" } prover = { git = "https://github.com/LimeChain/scroll-zkevm-circuits", branch = "LimeChain/wip-block-hashes-poc", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] } base64 = "0.13.0" diff --git a/common/libzkp/impl/rust-toolchain b/common/libzkp/impl/rust-toolchain index 44b874bfca..27c108be5c 100644 --- a/common/libzkp/impl/rust-toolchain +++ b/common/libzkp/impl/rust-toolchain @@ -1 +1 @@ -nightly-2022-12-10 +nightly-2023-12-03 diff --git a/common/libzkp/impl/src/batch.rs b/common/libzkp/impl/src/batch.rs index b4512c6697..015f496cc3 100644 --- a/common/libzkp/impl/src/batch.rs +++ b/common/libzkp/impl/src/batch.rs @@ -10,7 +10,7 @@ use prover::{ aggregator::{Prover, Verifier}, consts::AGG_VK_FILENAME, utils::{chunk_trace_to_witness_block, init_env_and_log}, - BatchProof, BlockTrace, ChunkHash, ChunkProof, + BatchProof, ChunkHash, ChunkProof, ChunkTrace, }; use std::{cell::OnceCell, env, ptr::null}; @@ -119,7 +119,7 @@ pub unsafe extern "C" fn gen_batch_proof( let chunk_hashes_proofs = chunk_hashes .into_iter() - .zip(chunk_proofs.into_iter()) + .zip(chunk_proofs) .collect(); let proof = PROVER @@ -159,11 +159,11 @@ pub unsafe extern "C" fn verify_batch_proof(proof: *const c_char) -> c_char { // This function is only used for debugging on Go side. /// # Safety #[no_mangle] -pub unsafe extern "C" fn block_traces_to_chunk_info(block_traces: *const c_char) -> *const c_char { - let block_traces = c_char_to_vec(block_traces); - let block_traces = serde_json::from_slice::>(&block_traces).unwrap(); +pub unsafe extern "C" fn chunk_trace_to_chunk_info(chunk_trace: *const c_char) -> *const c_char { + let chunk_trace = c_char_to_vec(chunk_trace); + let chunk_trace = serde_json::from_slice::(&chunk_trace).unwrap(); - let witness_block = chunk_trace_to_witness_block(block_traces).unwrap(); + let witness_block = chunk_trace_to_witness_block(chunk_trace).unwrap(); let chunk_info = ChunkHash::from_witness_block(&witness_block, false); let chunk_info_bytes = serde_json::to_vec(&chunk_info).unwrap(); diff --git a/common/libzkp/impl/src/lib.rs b/common/libzkp/impl/src/lib.rs index 49af267459..94939b374b 100644 --- a/common/libzkp/impl/src/lib.rs +++ b/common/libzkp/impl/src/lib.rs @@ -1,4 +1,3 @@ -#![feature(once_cell)] mod batch; mod chunk; diff --git a/common/libzkp/interface/libzkp.h b/common/libzkp/interface/libzkp.h index 7fe37a1c58..8f6a11072f 100644 --- a/common/libzkp/interface/libzkp.h +++ b/common/libzkp/interface/libzkp.h @@ -11,5 +11,5 @@ char* get_chunk_vk(); char* gen_chunk_proof(char* chunk_trace); char verify_chunk_proof(char* proof); -char* block_traces_to_chunk_info(char* block_traces); +char* chunk_trace_to_chunk_info(char* chunk_trace); void free_c_chars(char* ptr); diff --git a/prover/core/prover.go b/prover/core/prover.go index f2aa14e1db..b7548ebca8 100644 --- a/prover/core/prover.go +++ b/prover/core/prover.go @@ -17,7 +17,6 @@ import ( "path/filepath" "unsafe" - "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/log" "scroll-tech/common/types/message" @@ -132,12 +131,12 @@ func (p *ProverCore) ProveChunk( } // TracesToChunkInfo convert traces to chunk info -func (p *ProverCore) 
TracesToChunkInfo(traces []*types.BlockTrace) (*message.ChunkInfo, error) { - tracesByt, err := json.Marshal(traces) +func (p *ProverCore) TracesToChunkInfo(trace *message.ChunkTrace) (*message.ChunkInfo, error) { + traceByt, err := json.Marshal(trace) if err != nil { return nil, err } - chunkInfoByt := p.tracesToChunkInfo(tracesByt) + chunkInfoByt := p.tracesToChunkInfo(traceByt) chunkInfo := &message.ChunkInfo{} return chunkInfo, json.Unmarshal(chunkInfoByt, chunkInfo) @@ -249,11 +248,11 @@ func (p *ProverCore) mayDumpProof(id string, proofByt []byte) error { return err } -func (p *ProverCore) tracesToChunkInfo(tracesByt []byte) []byte { - tracesStr := C.CString(string(tracesByt)) - defer C.free(unsafe.Pointer(tracesStr)) +func (p *ProverCore) tracesToChunkInfo(traceByt []byte) []byte { + traceStr := C.CString(string(traceByt)) + defer C.free(unsafe.Pointer(traceStr)) - cChunkInfo := C.block_traces_to_chunk_info(tracesStr) + cChunkInfo := C.chunk_trace_to_chunk_info(traceStr) defer C.free_c_chars(cChunkInfo) chunkInfo := C.GoString(cChunkInfo) From 4d99b588db9d0dcc242659e1954e3a8a6444ec11 Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Tue, 16 Jan 2024 11:19:10 +0200 Subject: [PATCH 53/59] fix: add update libzkp dep --- common/libzkp/impl/Cargo.lock | 110 +++++++++++++++++++++++----------- 1 file changed, 76 insertions(+), 34 deletions(-) diff --git a/common/libzkp/impl/Cargo.lock b/common/libzkp/impl/Cargo.lock index 1dd10d5c67..4fc9e9612c 100644 --- a/common/libzkp/impl/Cargo.lock +++ b/common/libzkp/impl/Cargo.lock @@ -31,13 +31,13 @@ dependencies = [ [[package]] name = "aggregator" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "ark-std", "env_logger 0.10.0", "eth-types", "ethers-core", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "hex", "itertools 0.11.0", "log", @@ -323,14 +323,14 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" [[package]] name = "bus-mapping" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "eth-types", "ethers-core", "ethers-providers", "ethers-signers", "gadgets", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "hex", "itertools 0.11.0", "log", @@ -908,12 +908,12 @@ dependencies = [ [[package]] name = "eth-types" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "ethers-core", "ethers-signers", "halo2-base", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "hex", "itertools 0.11.0", "num", @@ -1061,7 +1061,7 @@ dependencies = [ [[package]] name = "external-tracer" version = "0.1.0" -source = 
"git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "eth-types", "geth-utils", @@ -1241,10 +1241,10 @@ dependencies = [ [[package]] name = "gadgets" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "eth-types", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "sha3 0.10.8", "strum 0.25.0", ] @@ -1263,7 +1263,7 @@ dependencies = [ [[package]] name = "geth-utils" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "env_logger 0.10.0", "gobuild", @@ -1373,10 +1373,10 @@ dependencies = [ [[package]] name = "halo2-base" version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#b7c53bb7456063936f4ca6df8fa8e751d9c17d85" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#40ba7e3bbf013b55c59283534c9489701f9212d0" dependencies = [ "ff 0.13.0", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.1)", "itertools 0.10.5", "num-bigint", "num-integer", @@ -1388,7 +1388,7 @@ dependencies = [ [[package]] name = "halo2-ecc" version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#b7c53bb7456063936f4ca6df8fa8e751d9c17d85" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#40ba7e3bbf013b55c59283534c9489701f9212d0" dependencies = [ "ff 0.13.0", "group 0.13.0", @@ -1409,7 +1409,7 @@ name = "halo2-gate-generator" version = "0.1.0" source = "git+https://github.com/scroll-tech/halo2gategen.git#8ccf462e1eff4ed0e602d7ba19771b2c53dee0e3" dependencies = [ - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "lazy_static", "num-bigint", "rand", @@ -1427,7 +1427,7 @@ source = "git+https://github.com/scroll-tech/mpt-circuit.git?branch=v0.7#32ab964 dependencies = [ "env_logger 0.10.0", "ethers-core", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "hex", "itertools 0.10.5", "lazy_static", @@ -1453,7 +1453,7 @@ dependencies = [ "bitvec", "ff 0.13.0", "group 0.13.0", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "halo2curves 0.3.2", "lazy_static", "rand", @@ -1484,6 +1484,31 @@ dependencies = [ "tracing", ] +[[package]] +name = "halo2_proofs" +version = "0.2.0" +source = "git+https://github.com/scroll-tech/halo2.git?branch=v1.1#0008b866c1b458bf7129fcd88e877d5b58c25e2f" +dependencies = [ + "ark-std", + "blake2b_simd", + "cfg-if 0.1.10", + "crossbeam", + "ff 0.13.0", + "group 0.13.0", + "halo2curves 0.1.0", + "log", + "maybe-rayon", + "num-bigint", + "num-integer", + "poseidon", + "rand_chacha", + "rand_core", + "rayon", + "sha3 0.9.1", + "subtle", + "tracing", +] + [[package]] name = 
"halo2curves" version = "0.1.0" @@ -1824,6 +1849,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.9" @@ -1878,11 +1912,11 @@ dependencies = [ [[package]] name = "keccak256" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "env_logger 0.10.0", "eth-types", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "itertools 0.11.0", "log", "num-bigint", @@ -1952,6 +1986,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "maybe-rayon" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519" +dependencies = [ + "cfg-if 1.0.0", + "rayon", +] + [[package]] name = "memchr" version = "2.5.0" @@ -1999,7 +2043,7 @@ version = "0.1.0" source = "git+https://github.com/scroll-tech/misc-precompiled-circuit.git?branch=main#f46cf8fd0072e5531315739b20b5248f4bd2caac" dependencies = [ "halo2-gate-generator", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "num-bigint", "rand", "serde", @@ -2012,7 +2056,7 @@ dependencies = [ [[package]] name = "mock" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "eth-types", "ethabi", @@ -2028,11 +2072,11 @@ dependencies = [ [[package]] name = "mpt-zktrie" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "eth-types", "halo2-mpt-circuits", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "hex", "log", "num-bigint", @@ -2378,7 +2422,7 @@ source = "git+https://github.com/scroll-tech/poseidon-circuit.git?branch=scroll- dependencies = [ "bitvec", "ff 0.13.0", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "lazy_static", "log", "rand", @@ -2458,7 +2502,7 @@ dependencies = [ [[package]] name = "prover" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "aggregator", "anyhow", @@ -2470,7 +2514,7 @@ dependencies = [ "eth-types", "ethers-core", "git-version", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "hex", "itertools 0.11.0", "log", @@ -3215,15 +3259,14 @@ checksum = 
"62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "snark-verifier" version = "0.1.0" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#12c1121a855564936a267b37bc9c27306de3eb3b" +source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#8085ab39dd8386b804083951d85e609b79af87c9" dependencies = [ "bytes", "ethereum-types", "halo2-base", "halo2-ecc", "hex", - "itertools 0.10.5", - "lazy_static", + "itertools 0.12.0", "num-bigint", "num-integer", "num-traits", @@ -3239,15 +3282,14 @@ dependencies = [ [[package]] name = "snark-verifier-sdk" version = "0.0.1" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#12c1121a855564936a267b37bc9c27306de3eb3b" +source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#8085ab39dd8386b804083951d85e609b79af87c9" dependencies = [ "bincode", "ethereum-types", "ff 0.13.0", "halo2-base", "hex", - "itertools 0.10.5", - "lazy_static", + "itertools 0.12.0", "log", "num-bigint", "num-integer", @@ -4003,7 +4045,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" [[package]] name = "zkevm-circuits" version = "0.1.0" -source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#95086e12dc3d1fa343886d89aec1082876bfa183" +source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "array-init", "bus-mapping", @@ -4018,7 +4060,7 @@ dependencies = [ "halo2-base", "halo2-ecc", "halo2_gadgets", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "hex", "itertools 0.11.0", "keccak256", @@ -4050,7 +4092,7 @@ dependencies = [ "base64 0.13.1", "env_logger 0.9.3", "gobuild", - "halo2_proofs", + "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", "libc", "log", "once_cell", From 23ef6a1f923c581e707eedaac7262c08c28c6c5b Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Tue, 16 Jan 2024 11:21:57 +0200 Subject: [PATCH 54/59] fix: add fix libzkp cargo deps --- common/libzkp/impl/Cargo.lock | 88 +++++++++-------------------------- 1 file changed, 23 insertions(+), 65 deletions(-) diff --git a/common/libzkp/impl/Cargo.lock b/common/libzkp/impl/Cargo.lock index 4fc9e9612c..4390555c76 100644 --- a/common/libzkp/impl/Cargo.lock +++ b/common/libzkp/impl/Cargo.lock @@ -37,7 +37,7 @@ dependencies = [ "env_logger 0.10.0", "eth-types", "ethers-core", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "hex", "itertools 0.11.0", "log", @@ -330,7 +330,7 @@ dependencies = [ "ethers-providers", "ethers-signers", "gadgets", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "hex", "itertools 0.11.0", "log", @@ -913,7 +913,7 @@ dependencies = [ "ethers-core", "ethers-signers", "halo2-base", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "hex", "itertools 0.11.0", "num", @@ -1244,7 +1244,7 @@ version = "0.1.0" source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChain/wip-block-hashes-poc#27b1ffe635250dc35c0fd6888938c0a8d82ccc71" dependencies = [ "eth-types", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "sha3 0.10.8", "strum 0.25.0", ] @@ -1373,10 +1373,10 @@ dependencies = [ [[package]] name = 
"halo2-base" version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#40ba7e3bbf013b55c59283534c9489701f9212d0" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#b7c53bb7456063936f4ca6df8fa8e751d9c17d85" dependencies = [ "ff 0.13.0", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.1)", + "halo2_proofs", "itertools 0.10.5", "num-bigint", "num-integer", @@ -1388,7 +1388,7 @@ dependencies = [ [[package]] name = "halo2-ecc" version = "0.2.2" -source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#40ba7e3bbf013b55c59283534c9489701f9212d0" +source = "git+https://github.com/scroll-tech/halo2-lib?branch=develop#b7c53bb7456063936f4ca6df8fa8e751d9c17d85" dependencies = [ "ff 0.13.0", "group 0.13.0", @@ -1409,7 +1409,7 @@ name = "halo2-gate-generator" version = "0.1.0" source = "git+https://github.com/scroll-tech/halo2gategen.git#8ccf462e1eff4ed0e602d7ba19771b2c53dee0e3" dependencies = [ - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "lazy_static", "num-bigint", "rand", @@ -1427,7 +1427,7 @@ source = "git+https://github.com/scroll-tech/mpt-circuit.git?branch=v0.7#32ab964 dependencies = [ "env_logger 0.10.0", "ethers-core", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "hex", "itertools 0.10.5", "lazy_static", @@ -1453,7 +1453,7 @@ dependencies = [ "bitvec", "ff 0.13.0", "group 0.13.0", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "halo2curves 0.3.2", "lazy_static", "rand", @@ -1484,31 +1484,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "halo2_proofs" -version = "0.2.0" -source = "git+https://github.com/scroll-tech/halo2.git?branch=v1.1#0008b866c1b458bf7129fcd88e877d5b58c25e2f" -dependencies = [ - "ark-std", - "blake2b_simd", - "cfg-if 0.1.10", - "crossbeam", - "ff 0.13.0", - "group 0.13.0", - "halo2curves 0.1.0", - "log", - "maybe-rayon", - "num-bigint", - "num-integer", - "poseidon", - "rand_chacha", - "rand_core", - "rayon", - "sha3 0.9.1", - "subtle", - "tracing", -] - [[package]] name = "halo2curves" version = "0.1.0" @@ -1849,15 +1824,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.9" @@ -1916,7 +1882,7 @@ source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChai dependencies = [ "env_logger 0.10.0", "eth-types", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "itertools 0.11.0", "log", "num-bigint", @@ -1986,16 +1952,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "maybe-rayon" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519" -dependencies = [ - "cfg-if 1.0.0", - "rayon", -] - [[package]] name = "memchr" version = "2.5.0" @@ -2043,7 +1999,7 @@ version = "0.1.0" source = "git+https://github.com/scroll-tech/misc-precompiled-circuit.git?branch=main#f46cf8fd0072e5531315739b20b5248f4bd2caac" dependencies = [ "halo2-gate-generator", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "num-bigint", "rand", "serde", @@ -2076,7 +2032,7 @@ 
source = "git+https://github.com/LimeChain/scroll-zkevm-circuits?branch=LimeChai dependencies = [ "eth-types", "halo2-mpt-circuits", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "hex", "log", "num-bigint", @@ -2422,7 +2378,7 @@ source = "git+https://github.com/scroll-tech/poseidon-circuit.git?branch=scroll- dependencies = [ "bitvec", "ff 0.13.0", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "lazy_static", "log", "rand", @@ -2514,7 +2470,7 @@ dependencies = [ "eth-types", "ethers-core", "git-version", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "hex", "itertools 0.11.0", "log", @@ -3259,14 +3215,15 @@ checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "snark-verifier" version = "0.1.0" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#8085ab39dd8386b804083951d85e609b79af87c9" +source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#12c1121a855564936a267b37bc9c27306de3eb3b" dependencies = [ "bytes", "ethereum-types", "halo2-base", "halo2-ecc", "hex", - "itertools 0.12.0", + "itertools 0.10.5", + "lazy_static", "num-bigint", "num-integer", "num-traits", @@ -3282,14 +3239,15 @@ dependencies = [ [[package]] name = "snark-verifier-sdk" version = "0.0.1" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#8085ab39dd8386b804083951d85e609b79af87c9" +source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#12c1121a855564936a267b37bc9c27306de3eb3b" dependencies = [ "bincode", "ethereum-types", "ff 0.13.0", "halo2-base", "hex", - "itertools 0.12.0", + "itertools 0.10.5", + "lazy_static", "log", "num-bigint", "num-integer", @@ -4060,7 +4018,7 @@ dependencies = [ "halo2-base", "halo2-ecc", "halo2_gadgets", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "hex", "itertools 0.11.0", "keccak256", @@ -4092,7 +4050,7 @@ dependencies = [ "base64 0.13.1", "env_logger 0.9.3", "gobuild", - "halo2_proofs 0.2.0 (git+https://github.com/scroll-tech/halo2.git?branch=v1.0)", + "halo2_proofs", "libc", "log", "once_cell", From bdd3224ba2dd77979c9ce29b7d53e1639c5ee2d2 Mon Sep 17 00:00:00 2001 From: failfmi Date: Tue, 16 Jan 2024 13:28:52 +0200 Subject: [PATCH 55/59] fix(rollup/l2_watcher): missing l1blockhashes sender --- rollup/internal/controller/watcher/l2_watcher.go | 1 + 1 file changed, 1 insertion(+) diff --git a/rollup/internal/controller/watcher/l2_watcher.go b/rollup/internal/controller/watcher/l2_watcher.go index 19f068172b..4685e5e390 100644 --- a/rollup/internal/controller/watcher/l2_watcher.go +++ b/rollup/internal/controller/watcher/l2_watcher.go @@ -160,6 +160,7 @@ func txsToTxsData(txs gethTypes.Transactions) []*types.TransactionData { } if l1blockHashesTx := tx.AsL1BlockHashesTx(); l1blockHashesTx != nil { + txData.From = l1blockHashesTx.Sender txData.FirstAppliedL1Block = (*hexutil.Uint64)(&l1blockHashesTx.FirstAppliedL1Block) txData.LastAppliedL1Block = (*hexutil.Uint64)(&l1blockHashesTx.LastAppliedL1Block) txData.BlockRangeHash = l1blockHashesTx.BlockHashesRange From 2395b26644b9ddb2be996105a9950a2e5ca87c52 Mon Sep 17 00:00:00 2001 From: failfmi Date: Thu, 1 Feb 2024 11:03:58 +0200 Subject: [PATCH 56/59] feat(rollup-relayer): latest go-ethereum hash --- go.work.sum | 2 ++ rollup/go.mod | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git 
a/go.work.sum b/go.work.sum index 10ca8b67e1..5d3989f04a 100644 --- a/go.work.sum +++ b/go.work.sum @@ -532,6 +532,8 @@ github.com/limechain/scroll-go-ethereum v0.0.0-20231223062238-eaa38f09ce93 h1:sj github.com/limechain/scroll-go-ethereum v0.0.0-20231223062238-eaa38f09ce93/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= github.com/limechain/scroll-go-ethereum v0.0.0-20240111134224-79313f78bdcb h1:cBgnLqPgdilOqsOKz3aYlTuawDReQuuGE/5HTHUzmSo= github.com/limechain/scroll-go-ethereum v0.0.0-20240111134224-79313f78bdcb/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= +github.com/limechain/scroll-go-ethereum v0.0.0-20240201082044-06393c57c762 h1:1a/GtdrINsxWMFiMFsn4xRzsK8FUXLpPM6Nl4J2eq3U= +github.com/limechain/scroll-go-ethereum v0.0.0-20240201082044-06393c57c762/go.mod h1:4HrFcoStbViFVy/9l/rvKl1XmizVAaPdgqI8v0U8hOc= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= diff --git a/rollup/go.mod b/rollup/go.mod index a56c069105..5b3176fa2c 100644 --- a/rollup/go.mod +++ b/rollup/go.mod @@ -87,4 +87,4 @@ require ( gopkg.in/yaml.v3 v3.0.1 // indirect ) -replace github.com/scroll-tech/go-ethereum => github.com/limechain/scroll-go-ethereum 79313f78bdcb3b5101459752de1926999db9eb2d +replace github.com/scroll-tech/go-ethereum => github.com/limechain/scroll-go-ethereum 06393c57c762bc5f88208a27ec277995e5462d19 From 0dd91b6bd5bed5e187278dbabbe4d534e05a651d Mon Sep 17 00:00:00 2001 From: failfmi Date: Tue, 13 Feb 2024 18:57:35 +0200 Subject: [PATCH 57/59] test(contracts/ScrollChain): examples with 1, 2 and 256 block hashes --- contracts/src/test/ScrollChain.t.sol | 333 +++++++++++++++++++++++++++ 1 file changed, 333 insertions(+) diff --git a/contracts/src/test/ScrollChain.t.sol b/contracts/src/test/ScrollChain.t.sol index 0bf9aa85af..3f6a28cdac 100644 --- a/contracts/src/test/ScrollChain.t.sol +++ b/contracts/src/test/ScrollChain.t.sol @@ -690,6 +690,339 @@ contract ScrollChainTest is DSTestPlus { assertBoolEq(rollup.isSequencer(_sequencer), false); } + // commit batch, one chunk with one block, 1 tx, 1 L1 message, no skip, 1 block hash range + function testCommitBatchOneBlockHash() public { + rollup.addSequencer(address(0)); + rollup.addProver(address(0)); + + // Roll 256 blocks + hevm.roll(256); + + // import 300 L1 messages + for (uint256 i = 0; i < 300; i++) { + messageQueue.appendCrossDomainMessage(address(this), 1000000, new bytes(0)); + } + + // import genesis batch first + bytes memory batchHeader0 = new bytes(129); + assembly { + mstore(add(batchHeader0, add(0x20, 25)), 1) + } + rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1))); + bytes32 batchHash0 = rollup.committedBatches(0); + + bytes memory bitmap; + // have only one chunk + bytes memory chunk0; + + // commit batch1, one chunk with one block, 1 tx, 1 L1 message, no skip + // => payload for data hash of chunk0 + // 0000000000000000 - blockNumber + // 0000000000000000 - timestamp + // 0000000000000000000000000000000000000000000000000000000000000000 - baseFee + // 0000000000000000 - gasLimit + // 0001 - numTransactions + // 0001- numL1Messages + // 0000000000000001 - lastAppliedL1Block + // a2277fd30bbbe74323309023b56035b376d7768ad237ae4fc46ead7dc9591ae1 - L1 Message Tx Hash + // 0000000000000001 - chunk 0 - lastAppliedL1Block + // 
b10e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf6 - chunk 0 - l1BlockRangeHash + // => data hash for all chunks + // 86fcdd7b593809d108dae8a3a696e5ae4af774943f15cc2fd3c39cd02dabd0d7 + // => payload for batch header + // 00 + // 0000000000000001 + // 0000000000000001 + // 0000000000000001 + // 86fcdd7b593809d108dae8a3a696e5ae4af774943f15cc2fd3c39cd02dabd0d7 + // 743dab51a4c73747185caad9effa81411a067f3d7aa69d69d4b7f3e9802a71c4 + // 0000000000000000000000000000000000000000000000000000000000000000 + // 0000000000000001 + // b5d9d894133a730aa651ef62d26b0ffa846233c74177a591a4a896adfda97d22 + // => hash for batch header + // b6448e0bbd8226646f2099cd47160478768e5ccc32b486189073dfcedbad34e3 + bytes memory batchHeader1 = new bytes(129 + 32); + assembly { + mstore(add(batchHeader1, 0x20), 0) // version + mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex = 1 + mstore(add(batchHeader1, add(0x20, 9)), shl(192, 1)) // l1MessagePopped = 1 + mstore(add(batchHeader1, add(0x20, 17)), shl(192, 1)) // totalL1MessagePopped = 1 + mstore(add(batchHeader1, add(0x20, 25)), 0x86fcdd7b593809d108dae8a3a696e5ae4af774943f15cc2fd3c39cd02dabd0d7) // dataHash + mstore(add(batchHeader1, add(0x20, 57)), batchHash0) // parentBatchHash + mstore(add(batchHeader1, add(0x20, 89)), 0) // bitmap0 + mstore(add(batchHeader1, add(0x20, 121)), shl(192, 1)) // lastAppliedL1Block + mstore( + add(batchHeader1, add(0x20, 129)), + 0xb5d9d894133a730aa651ef62d26b0ffa846233c74177a591a4a896adfda97d22 + ) // blockRangeHash + } + + // set chunk data + chunk0 = new bytes(1 + 108); + assembly { + mstore(add(chunk0, 0x20), shl(248, 1)) // numBlocks = 1 + mstore(add(chunk0, add(0x21, 56)), shl(240, 1)) // numTransactions = 1, block 0 + mstore(add(chunk0, add(0x21, 58)), shl(240, 1)) // numL1Messages = 1, block 0 + mstore(add(chunk0, add(0x21, 60)), shl(192, 1)) // lastAppliedL1Block = 1, block 0 + mstore(add(chunk0, add(0x20, 69)), shl(192, 1)) // lastAppliedL1Block = 1, chunk 0 + mstore(add(chunk0, add(0x20, 77)), 0xb10e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf6) // blockRangeHash + } + + bytes[] memory chunks = new bytes[](1); + chunks[0] = chunk0; + bitmap = new bytes(32); + + hevm.startPrank(address(0)); + hevm.expectEmit(true, true, false, true); + emit CommitBatch(1, bytes32(0xb6448e0bbd8226646f2099cd47160478768e5ccc32b486189073dfcedbad34e3)); + rollup.commitBatch(0, batchHeader0, chunks, bitmap, 0); + hevm.stopPrank(); + assertBoolEq(rollup.isBatchFinalized(1), false); + bytes32 batchHash1 = rollup.committedBatches(1); + assertEq(batchHash1, bytes32(0xb6448e0bbd8226646f2099cd47160478768e5ccc32b486189073dfcedbad34e3)); + + // finalize batch1 + hevm.startPrank(address(0)); + hevm.expectEmit(true, true, false, true); + emit FinalizeBatch(1, batchHash1, bytes32(uint256(2)), bytes32(uint256(3))); + rollup.finalizeBatchWithProof( + batchHeader1, + bytes32(uint256(1)), + bytes32(uint256(2)), + bytes32(uint256(3)), + new bytes(0) + ); + hevm.stopPrank(); + assertBoolEq(rollup.isBatchFinalized(1), true); + assertEq(rollup.finalizedStateRoots(1), bytes32(uint256(2))); + assertEq(rollup.withdrawRoots(1), bytes32(uint256(3))); + assertEq(rollup.lastFinalizedBatchIndex(), 1); + assertBoolEq(messageQueue.isMessageSkipped(0), false); + assertEq(messageQueue.pendingQueueIndex(), 1); + } + + // commit batch, one chunk with one block, 1 tx, 1 L1 message, no skip, 2 block hash range + function testCommitBatchTwoBlockHash() public { + rollup.addSequencer(address(0)); + rollup.addProver(address(0)); + + // Roll 256 
blocks + hevm.roll(256); + + // import 300 L1 messages + for (uint256 i = 0; i < 300; i++) { + messageQueue.appendCrossDomainMessage(address(this), 1000000, new bytes(0)); + } + + // import genesis batch first + bytes memory batchHeader0 = new bytes(129); + assembly { + mstore(add(batchHeader0, add(0x20, 25)), 1) + } + rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1))); + bytes32 batchHash0 = rollup.committedBatches(0); + + bytes memory bitmap; + // have only one chunk + bytes memory chunk0; + + // commit batch1, one chunk with one block, 1 tx, 1 L1 message, no skip + // => payload for data hash of chunk0 + // 0000000000000000 - blockNumber + // 0000000000000000 - timestamp + // 0000000000000000000000000000000000000000000000000000000000000000 - baseFee + // 0000000000000000 - gasLimit + // 0001 - numTransactions + // 0001- numL1Messages + // 0000000000000002 - lastAppliedL1Block + // a2277fd30bbbe74323309023b56035b376d7768ad237ae4fc46ead7dc9591ae1 - L1 Message Tx Hash + // 0000000000000002 - chunk 0 - lastAppliedL1Block + // e90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0 - chunk 0 - l1BlockRangeHash + // => data hash for all chunks + // 7b57de313b5f6add99c3c80246deb99fbdda3735a95e4fb2dded72c874d0c116 + // => payload for batch header + // 00 + // 0000000000000001 + // 0000000000000001 + // 0000000000000001 + // 7b57de313b5f6add99c3c80246deb99fbdda3735a95e4fb2dded72c874d0c116 + // 743dab51a4c73747185caad9effa81411a067f3d7aa69d69d4b7f3e9802a71c4 + // 0000000000000000000000000000000000000000000000000000000000000000 + // 0000000000000002 + // 7fef4bf8f63cf9dd467136c679c02b5c17fcf6322d9562512bf5eb952cf7cc53 + // => hash for batch header + // 17a2afe6fe48a603684b7bd5d049de9b8d2093d8c8d927d778cf87a5a553d92c + bytes memory batchHeader1 = new bytes(129 + 32); + assembly { + mstore(add(batchHeader1, 0x20), 0) // version + mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex = 1 + mstore(add(batchHeader1, add(0x20, 9)), shl(192, 1)) // l1MessagePopped = 1 + mstore(add(batchHeader1, add(0x20, 17)), shl(192, 1)) // totalL1MessagePopped = 1 + mstore(add(batchHeader1, add(0x20, 25)), 0x7b57de313b5f6add99c3c80246deb99fbdda3735a95e4fb2dded72c874d0c116) // dataHash + mstore(add(batchHeader1, add(0x20, 57)), batchHash0) // parentBatchHash + mstore(add(batchHeader1, add(0x20, 89)), 0) // bitmap0 + mstore(add(batchHeader1, add(0x20, 121)), shl(192, 2)) // lastAppliedL1Block + mstore( + add(batchHeader1, add(0x20, 129)), + 0x7fef4bf8f63cf9dd467136c679c02b5c17fcf6322d9562512bf5eb952cf7cc53 + ) // blockRangeHash + } + + // set chunk data + chunk0 = new bytes(1 + 108); + assembly { + mstore(add(chunk0, 0x20), shl(248, 1)) // numBlocks = 1 + mstore(add(chunk0, add(0x21, 56)), shl(240, 1)) // numTransactions = 1, block 0 + mstore(add(chunk0, add(0x21, 58)), shl(240, 1)) // numL1Messages = 1, block 0 + mstore(add(chunk0, add(0x21, 60)), shl(192, 2)) // lastAppliedL1Block = 1, block 0 + mstore(add(chunk0, add(0x20, 69)), shl(192, 2)) // lastAppliedL1Block = 1, chunk 0 + mstore(add(chunk0, add(0x20, 77)), 0xe90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0) // blockRangeHash (1-2) + } + + bytes[] memory chunks = new bytes[](1); + chunks[0] = chunk0; + bitmap = new bytes(32); + + hevm.startPrank(address(0)); + hevm.expectEmit(true, true, false, true); + emit CommitBatch(1, bytes32(0x17a2afe6fe48a603684b7bd5d049de9b8d2093d8c8d927d778cf87a5a553d92c)); + rollup.commitBatch(0, batchHeader0, chunks, bitmap, 0); + hevm.stopPrank(); + 
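        // For reference, a sketch of the same 1 + 108-byte chunk0 built with
        // abi.encodePacked instead of raw mstore writes. The local name
        // chunk0Packed is hypothetical and is only checked against the
        // assembly-built chunk0 above; nothing here is required by ScrollChain
        // or ChunkCodec.
        bytes memory chunk0Packed = abi.encodePacked(
            uint8(1), // numBlocks = 1
            // 68-byte block context: blockNumber, timestamp, baseFee, gasLimit,
            // numTransactions, numL1Messages, lastAppliedL1Block
            uint64(0),
            uint64(0),
            uint256(0),
            uint64(0),
            uint16(1),
            uint16(1),
            uint64(2),
            // 40-byte chunk trailer: chunk-level lastAppliedL1Block and l1BlockRangeHash
            uint64(2),
            bytes32(0xe90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0)
        );
        // Sanity check; holds as long as the packed layout above matches the assembly-built chunk0.
        assertEq(keccak256(chunk0Packed), keccak256(chunk0));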
assertBoolEq(rollup.isBatchFinalized(1), false); + bytes32 batchHash1 = rollup.committedBatches(1); + assertEq(batchHash1, bytes32(0x17a2afe6fe48a603684b7bd5d049de9b8d2093d8c8d927d778cf87a5a553d92c)); + + // finalize batch1 + hevm.startPrank(address(0)); + hevm.expectEmit(true, true, false, true); + emit FinalizeBatch(1, batchHash1, bytes32(uint256(2)), bytes32(uint256(3))); + rollup.finalizeBatchWithProof( + batchHeader1, + bytes32(uint256(1)), + bytes32(uint256(2)), + bytes32(uint256(3)), + new bytes(0) + ); + hevm.stopPrank(); + assertBoolEq(rollup.isBatchFinalized(1), true); + assertEq(rollup.finalizedStateRoots(1), bytes32(uint256(2))); + assertEq(rollup.withdrawRoots(1), bytes32(uint256(3))); + assertEq(rollup.lastFinalizedBatchIndex(), 1); + assertBoolEq(messageQueue.isMessageSkipped(0), false); + assertEq(messageQueue.pendingQueueIndex(), 1); + } + + // commit batch, one chunk with one block, 1 tx, 1 L1 message, no skip, 256 block hashes range + function testCommitBatchMaxBlockHashes() public { + rollup.addSequencer(address(0)); + rollup.addProver(address(0)); + + // Roll 257 blocks + hevm.roll(257); + + // import 300 L1 messages + for (uint256 i = 0; i < 300; i++) { + messageQueue.appendCrossDomainMessage(address(this), 1000000, new bytes(0)); + } + + // import genesis batch first + bytes memory batchHeader0 = new bytes(129); + assembly { + mstore(add(batchHeader0, add(0x20, 25)), 1) + } + rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1))); + bytes32 batchHash0 = rollup.committedBatches(0); + + bytes memory bitmap; + // have only one chunk + bytes memory chunk0; + + // commit batch1, one chunk with one block, 1 tx, 1 L1 message, no skip + // => payload for data hash of chunk0 + // 0000000000000000 - blockNumber + // 0000000000000000 - timestamp + // 0000000000000000000000000000000000000000000000000000000000000000 - baseFee + // 0000000000000000 - gasLimit + // 0001 - numTransactions + // 0001- numL1Messages + // 00000000000000ff - lastAppliedL1Block + // a2277fd30bbbe74323309023b56035b376d7768ad237ae4fc46ead7dc9591ae1 - L1 Message Tx Hash + // 00000000000000ff - chunk 0 - lastAppliedL1Block + // 31ec624678c8b0057c3c01bc034b716fe904a41b47d3cd08ff0a525dd8041949 - chunk 0 - l1BlockRangeHash + // => data hash for all chunks + // 2572331854a9b44d1d0cd8fa8455d80ce80fc515057705c471287f0147d2b023 + // => payload for batch header + // 00 + // 0000000000000001 + // 0000000000000001 + // 0000000000000001 + // 2572331854a9b44d1d0cd8fa8455d80ce80fc515057705c471287f0147d2b023 + // 743dab51a4c73747185caad9effa81411a067f3d7aa69d69d4b7f3e9802a71c4 + // 0000000000000000000000000000000000000000000000000000000000000000 + // 00000000000000ff + // 4eeccac9884cddf06b0dd1469927028d64f9a184f3525376d0a9dbce70e9b762 + // => hash for batch header + // f1c476cd8725b8c5e5d66a60522a12144ec3acbb2b6f213cca3650df8b1e297f + bytes memory batchHeader1 = new bytes(129 + 32); + assembly { + mstore(add(batchHeader1, 0x20), 0) // version + mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex = 1 + mstore(add(batchHeader1, add(0x20, 9)), shl(192, 1)) // l1MessagePopped = 1 + mstore(add(batchHeader1, add(0x20, 17)), shl(192, 1)) // totalL1MessagePopped = 1 + mstore(add(batchHeader1, add(0x20, 25)), 0x2572331854a9b44d1d0cd8fa8455d80ce80fc515057705c471287f0147d2b023) // dataHash + mstore(add(batchHeader1, add(0x20, 57)), batchHash0) // parentBatchHash + mstore(add(batchHeader1, add(0x20, 89)), 0) // bitmap0 + mstore(add(batchHeader1, add(0x20, 121)), shl(192, 256)) // lastAppliedL1Block + mstore( + 
add(batchHeader1, add(0x20, 129)), + 0x4eeccac9884cddf06b0dd1469927028d64f9a184f3525376d0a9dbce70e9b762 + ) // blockRangeHash + } + + // set chunk data + chunk0 = new bytes(1 + 108); + assembly { + mstore(add(chunk0, 0x20), shl(248, 1)) // numBlocks = 1 + mstore(add(chunk0, add(0x21, 56)), shl(240, 1)) // numTransactions = 1, block 0 + mstore(add(chunk0, add(0x21, 58)), shl(240, 1)) // numL1Messages = 1, block 0 + mstore(add(chunk0, add(0x21, 60)), shl(192, 256)) // lastAppliedL1Block = 256, block 0 + mstore(add(chunk0, add(0x20, 69)), shl(192, 256)) // lastAppliedL1Block = 256, chunk 0 + mstore(add(chunk0, add(0x20, 77)), 0x31ec624678c8b0057c3c01bc034b716fe904a41b47d3cd08ff0a525dd8041949) // blockRangeHash (1-256) + } + + bytes[] memory chunks = new bytes[](1); + chunks[0] = chunk0; + bitmap = new bytes(32); + + hevm.startPrank(address(0)); + hevm.expectEmit(true, true, false, true); + emit CommitBatch(1, bytes32(0xf1c476cd8725b8c5e5d66a60522a12144ec3acbb2b6f213cca3650df8b1e297f)); + rollup.commitBatch(0, batchHeader0, chunks, bitmap, 0); + hevm.stopPrank(); + assertBoolEq(rollup.isBatchFinalized(1), false); + bytes32 batchHash1 = rollup.committedBatches(1); + assertEq(batchHash1, bytes32(0xf1c476cd8725b8c5e5d66a60522a12144ec3acbb2b6f213cca3650df8b1e297f)); + + // finalize batch1 + hevm.startPrank(address(0)); + hevm.expectEmit(true, true, false, true); + emit FinalizeBatch(1, batchHash1, bytes32(uint256(2)), bytes32(uint256(3))); + rollup.finalizeBatchWithProof( + batchHeader1, + bytes32(uint256(1)), + bytes32(uint256(2)), + bytes32(uint256(3)), + new bytes(0) + ); + hevm.stopPrank(); + assertBoolEq(rollup.isBatchFinalized(1), true); + assertEq(rollup.finalizedStateRoots(1), bytes32(uint256(2))); + assertEq(rollup.withdrawRoots(1), bytes32(uint256(3))); + assertEq(rollup.lastFinalizedBatchIndex(), 1); + assertBoolEq(messageQueue.isMessageSkipped(0), false); + assertEq(messageQueue.pendingQueueIndex(), 1); + } + function testAddAndRemoveProver(address _prover) public { // set by non-owner, should revert hevm.startPrank(address(1)); From 837173d4e199cfc41af5c295f7a3b947fd34bb2d Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Tue, 13 Feb 2024 22:16:24 +0200 Subject: [PATCH 58/59] test: add ScrollChain commitBatch with 10 chunks with L1BlockHashes --- contracts/src/test/ScrollChain.t.sol | 525 +++++++++++++++++++++++++++ 1 file changed, 525 insertions(+) diff --git a/contracts/src/test/ScrollChain.t.sol b/contracts/src/test/ScrollChain.t.sol index 3f6a28cdac..d00bc73bcc 100644 --- a/contracts/src/test/ScrollChain.t.sol +++ b/contracts/src/test/ScrollChain.t.sol @@ -690,6 +690,531 @@ contract ScrollChainTest is DSTestPlus { assertBoolEq(rollup.isSequencer(_sequencer), false); } + // commit batch, 10 chunks with 10 blocks, 3 L1 Block Hashes in each chunk + function testCommitBatchWithManyL1BlockHashesTxs() public { + bytes[] memory chunks = new bytes[](10); + bytes memory chunk; + + for (uint256 i = 0; i < 10; i++) { + messageQueue.appendCrossDomainMessage(address(this), 1000000, new bytes(0)); + } + + rollup.addSequencer(address(0)); + + // import genesis batch first + bytes memory batchHeader0 = new bytes(129); + assembly { + mstore(add(batchHeader0, add(0x20, 25)), 1) + } + rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1))); + + // Chunk 1 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + 
chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0xdab56d50585bbe62d62bcb914061a1efb11ec54d86549bbe950c081f7f5d0b69) + mstore(add(chunk, add(0x08, 681)), 0x79) + mstore(add(chunk, add(0x08, 673)), 0x79) + mstore(add(chunk, add(0x08, 605)), 0x79) + mstore(add(chunk, add(0x08, 537)), 0x79) + mstore(add(chunk, add(0x08, 469)), 0x79) + mstore(add(chunk, add(0x08, 401)), 0x78) + mstore(add(chunk, add(0x08, 333)), 0x78) + mstore(add(chunk, add(0x08, 265)), 0x78) + mstore(add(chunk, add(0x08, 197)), 0x77) + mstore(add(chunk, add(0x08, 129)), 0x77) + mstore(add(chunk, add(0x08, 61)), 0x77) + } + chunks[0] = chunk; + + // Chunk 2 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0xf728ee46cf8d439a3e1a29a616dbc22adc899100c431020eee928abf5d3c4680) + mstore(add(chunk, add(0x08, 681)), 0x7C) + mstore(add(chunk, add(0x08, 673)), 0x7C) + mstore(add(chunk, add(0x08, 605)), 0x7C) + mstore(add(chunk, add(0x08, 537)), 0x7C) + mstore(add(chunk, add(0x08, 469)), 0x7C) + mstore(add(chunk, add(0x08, 401)), 0x7B) + mstore(add(chunk, add(0x08, 333)), 0x7B) + mstore(add(chunk, add(0x08, 265)), 0x7B) + mstore(add(chunk, add(0x08, 197)), 0x7A) 
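            // Note on the write pattern in these chunk blobs: mstore stores a
            // full 32-byte word, so mstore(add(chunk, add(0x08, N)), v)
            // right-aligns a small value v into the 8 data bytes at offsets
            // N..N+7 of chunk (the 0x08 bias is the 32-byte length slot minus
            // the 24 zero bytes in front of a uint64-sized value). The writes
            // at offsets 61, 129, ..., 673 (61 + 68*k) therefore set each
            // block's lastAppliedL1Block, the write at offset 681 sets the
            // chunk-level lastAppliedL1Block, and
            // mstore(add(chunk, add(0x20, 689)), ...) stores the full 32-byte
            // l1BlockRangeHash at offset 689.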
+ mstore(add(chunk, add(0x08, 129)), 0x7A) + mstore(add(chunk, add(0x08, 61)), 0x7A) + } + chunks[1] = chunk; + + // Chunk 3 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0x14df5fa3672808bc122794cc77d8c1c58e334af28adfbeb464341e3e4aa2c5f7) + mstore(add(chunk, add(0x08, 681)), 0x7F) + mstore(add(chunk, add(0x08, 673)), 0x7F) + mstore(add(chunk, add(0x08, 605)), 0x7F) + mstore(add(chunk, add(0x08, 537)), 0x7F) + mstore(add(chunk, add(0x08, 469)), 0x7F) + mstore(add(chunk, add(0x08, 401)), 0x7E) + mstore(add(chunk, add(0x08, 333)), 0x7E) + mstore(add(chunk, add(0x08, 265)), 0x7E) + mstore(add(chunk, add(0x08, 197)), 0x7D) + mstore(add(chunk, add(0x08, 129)), 0x7D) + mstore(add(chunk, add(0x08, 61)), 0x7D) + } + chunks[2] = chunk; + + // Chunk 4 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0x49cf7c8a8a6145fb39ddc498cc2397d27d4760fb055940495ac49012ee5874af) + mstore(add(chunk, add(0x08, 
681)), 0x82) + mstore(add(chunk, add(0x08, 673)), 0x82) + mstore(add(chunk, add(0x08, 605)), 0x82) + mstore(add(chunk, add(0x08, 537)), 0x82) + mstore(add(chunk, add(0x08, 469)), 0x82) + mstore(add(chunk, add(0x08, 401)), 0x81) + mstore(add(chunk, add(0x08, 333)), 0x81) + mstore(add(chunk, add(0x08, 265)), 0x81) + mstore(add(chunk, add(0x08, 197)), 0x80) + mstore(add(chunk, add(0x08, 129)), 0x80) + mstore(add(chunk, add(0x08, 61)), 0x80) + } + chunks[3] = chunk; + + // Chunk 5 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0x6b2ab6533745754097cef3c67000c06eddc2e0a640cb3f6c25e8be8b229abe21) + mstore(add(chunk, add(0x08, 681)), 0x85) + mstore(add(chunk, add(0x08, 673)), 0x85) + mstore(add(chunk, add(0x08, 605)), 0x85) + mstore(add(chunk, add(0x08, 537)), 0x85) + mstore(add(chunk, add(0x08, 469)), 0x85) + mstore(add(chunk, add(0x08, 401)), 0x84) + mstore(add(chunk, add(0x08, 333)), 0x84) + mstore(add(chunk, add(0x08, 265)), 0x84) + mstore(add(chunk, add(0x08, 197)), 0x83) + mstore(add(chunk, add(0x08, 129)), 0x83) + mstore(add(chunk, add(0x08, 61)), 0x83) + } + chunks[4] = chunk; + + // Chunk 6 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = 
bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0x4b3e0db23a327c1a25465bd90718d53aa171156c568ea29301b7af3cd88839fe) + mstore(add(chunk, add(0x08, 681)), 0x88) + mstore(add(chunk, add(0x08, 673)), 0x88) + mstore(add(chunk, add(0x08, 605)), 0x88) + mstore(add(chunk, add(0x08, 537)), 0x88) + mstore(add(chunk, add(0x08, 469)), 0x88) + mstore(add(chunk, add(0x08, 401)), 0x87) + mstore(add(chunk, add(0x08, 333)), 0x87) + mstore(add(chunk, add(0x08, 265)), 0x87) + mstore(add(chunk, add(0x08, 197)), 0x86) + mstore(add(chunk, add(0x08, 129)), 0x86) + mstore(add(chunk, add(0x08, 61)), 0x86) + } + chunks[5] = chunk; + + // Chunk 7 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0x912c42617851778491ad05827e4436c0b19d1df5fb16295554f01600b5005300) + mstore(add(chunk, add(0x08, 681)), 0x8B) + mstore(add(chunk, add(0x08, 673)), 0x8B) + mstore(add(chunk, add(0x08, 605)), 0x8B) + mstore(add(chunk, add(0x08, 537)), 0x8B) + mstore(add(chunk, add(0x08, 469)), 0x8B) + mstore(add(chunk, add(0x08, 401)), 0x8A) + mstore(add(chunk, add(0x08, 333)), 0x8A) + mstore(add(chunk, add(0x08, 265)), 0x8A) + mstore(add(chunk, add(0x08, 197)), 0x89) + mstore(add(chunk, add(0x08, 129)), 0x89) + mstore(add(chunk, add(0x08, 61)), 0x89) + } + chunks[6] = chunk; + + // Chunk 8 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = 
bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0x7609178ce660a0599557566f8e76f57dfe98bbae3268752f3f03f153da817fc2) + mstore(add(chunk, add(0x08, 681)), 0x8F) + mstore(add(chunk, add(0x08, 673)), 0x8F) + mstore(add(chunk, add(0x08, 605)), 0x8F) + mstore(add(chunk, add(0x08, 537)), 0x8F) + mstore(add(chunk, add(0x08, 469)), 0x8F) + mstore(add(chunk, add(0x08, 401)), 0x8E) + mstore(add(chunk, add(0x08, 333)), 0x8E) + mstore(add(chunk, add(0x08, 265)), 0x8E) + mstore(add(chunk, add(0x08, 197)), 0x8D) + mstore(add(chunk, add(0x08, 129)), 0x8D) + mstore(add(chunk, add(0x08, 61)), 0x8D) + } + chunks[7] = chunk; + + // Chunk 9 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0xa23ca349e8c64fdbff880e51ccdba5cf8a200426f30bb25b0e43d2592a444f00) + mstore(add(chunk, add(0x08, 681)), 0x92) + mstore(add(chunk, add(0x08, 673)), 0x92) + mstore(add(chunk, add(0x08, 605)), 0x92) + mstore(add(chunk, add(0x08, 537)), 0x92) + mstore(add(chunk, add(0x08, 469)), 0x92) + mstore(add(chunk, add(0x08, 401)), 0x91) + mstore(add(chunk, add(0x08, 333)), 0x91) + mstore(add(chunk, add(0x08, 265)), 0x91) + mstore(add(chunk, add(0x08, 197)), 0x90) + mstore(add(chunk, add(0x08, 129)), 0x90) + mstore(add(chunk, add(0x08, 61)), 0x90) + } + chunks[8] = chunk; + + // Chunk 10 + chunk = new bytes(1 + 40 + 680); + chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk + // Block 1 + chunk[58] = bytes1(uint8(2)); // numTransactions = 2 + chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 2 + chunk[126] = bytes1(uint8(1)); // numTransactions = 1 + chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 3 + chunk[194] = 
bytes1(uint8(1)); // numTransactions = 1 + chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 4 + chunk[262] = bytes1(uint8(2)); // numTransactions = 2 + chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 5 + chunk[330] = bytes1(uint8(1)); // numTransactions = 1 + chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 6 + chunk[392] = bytes1(uint8(1)); // numTransactions = 1 + chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 7 + chunk[454] = bytes1(uint8(2)); // numTransactions = 2 + chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 8 + chunk[516] = bytes1(uint8(1)); // numTransactions = 1 + chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 9 + chunk[576] = bytes1(uint8(1)); // numTransactions = 1 + chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 + // Block 10 + chunk[640] = bytes1(uint8(1)); // numTransactions = 1 + chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 + + assembly { + mstore(add(chunk, add(0x20, 689)), 0xae419b6fa346fcfe204bcae6d957ba39a40c04dd05f2104b47a83e1587f59bc7) + mstore(add(chunk, add(0x08, 681)), 0x95) + mstore(add(chunk, add(0x08, 673)), 0x95) + mstore(add(chunk, add(0x08, 605)), 0x95) + mstore(add(chunk, add(0x08, 537)), 0x95) + mstore(add(chunk, add(0x08, 469)), 0x95) + mstore(add(chunk, add(0x08, 401)), 0x94) + mstore(add(chunk, add(0x08, 333)), 0x94) + mstore(add(chunk, add(0x08, 265)), 0x94) + mstore(add(chunk, add(0x08, 197)), 0x93) + mstore(add(chunk, add(0x08, 129)), 0x93) + mstore(add(chunk, add(0x08, 61)), 0x93) + } + chunks[9] = chunk; + + hevm.roll(150); + hevm.startPrank(address(0)); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 118); + hevm.stopPrank(); + assertGt(uint256(rollup.committedBatches(1)), 0); + } + // commit batch, one chunk with one block, 1 tx, 1 L1 message, no skip, 1 block hash range function testCommitBatchOneBlockHash() public { rollup.addSequencer(address(0)); From 8088b2ed5901548070c301106fa6596198d6f71c Mon Sep 17 00:00:00 2001 From: Vladimir Trifonov Date: Thu, 15 Feb 2024 16:38:27 +0200 Subject: [PATCH 59/59] fix: add update ScrollChain test testCommitBatchWithManyL1BlockHashesTxs --- contracts/src/test/ScrollChain.t.sol | 1248 ++++++++++++++++---------- 1 file changed, 784 insertions(+), 464 deletions(-) diff --git a/contracts/src/test/ScrollChain.t.sol b/contracts/src/test/ScrollChain.t.sol index d00bc73bcc..af7a3e8a99 100644 --- a/contracts/src/test/ScrollChain.t.sol +++ b/contracts/src/test/ScrollChain.t.sol @@ -690,16 +690,17 @@ contract ScrollChainTest is DSTestPlus { assertBoolEq(rollup.isSequencer(_sequencer), false); } - // commit batch, 10 chunks with 10 blocks, 3 L1 Block Hashes in each chunk + // commit batch, 10 chunks, many txs, many L1 messages, many l1 block hashes function testCommitBatchWithManyL1BlockHashesTxs() public { bytes[] memory chunks = new bytes[](10); bytes memory chunk; - for (uint256 i = 0; i < 10; i++) { + for (uint256 i = 0; i < 300; i++) { messageQueue.appendCrossDomainMessage(address(this), 1000000, new bytes(0)); } rollup.addSequencer(address(0)); + rollup.addProver(address(0)); // import genesis batch first bytes memory batchHeader0 = new bytes(129); @@ -707,512 +708,831 @@ contract ScrollChainTest is DSTestPlus { mstore(add(batchHeader0, add(0x20, 25)), 1) } rollup.importGenesisBatch(batchHeader0, bytes32(uint256(1))); + bytes32 batchHash0 = rollup.committedBatches(0); + + bytes memory batchHeader1 = new bytes(129 + 32); + assembly { + mstore(add(batchHeader1, 0x20), 0) // version + 
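            // Batch header layout written here (129 + 32 = 161 bytes):
            //   offset   0  version               (1 byte)
            //   offset   1  batchIndex            (uint64)
            //   offset   9  l1MessagePopped       (uint64)
            //   offset  17  totalL1MessagePopped  (uint64)
            //   offset  25  dataHash              (bytes32)
            //   offset  57  parentBatchHash       (bytes32)
            //   offset  89  bitmap0               (bytes32)
            //   offset 121  lastAppliedL1Block    (uint64)
            //   offset 129  blockRangeHash        (bytes32)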
mstore(add(batchHeader1, add(0x20, 1)), shl(192, 1)) // batchIndex = 1 + mstore(add(batchHeader1, add(0x20, 9)), shl(192, 162)) // l1MessagePopped = 162 + mstore(add(batchHeader1, add(0x20, 17)), shl(192, 162)) // totalL1MessagePopped = 162 + mstore(add(batchHeader1, add(0x20, 25)), 0x7b57de313b5f6add99c3c80246deb99fbdda3735a95e4fb2dded72c874d0c116) // dataHash + mstore(add(batchHeader1, add(0x20, 57)), batchHash0) // parentBatchHash + mstore(add(batchHeader1, add(0x20, 89)), 0) // bitmap0 + mstore(add(batchHeader1, add(0x20, 121)), shl(192, 0x8A)) // lastAppliedL1Block + mstore( + add(batchHeader1, add(0x20, 129)), + 0xa8cf448633ba639062b57859aec34b05e57c0f4774451fba0aba365865c785e2 + ) // blockRangeHash + } + + uint256 chunkPtr; + uint256 blockPtr; + uint256 offsetPtr; // Chunk 1 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] = bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 0xdab56d50585bbe62d62bcb914061a1efb11ec54d86549bbe950c081f7f5d0b69) - mstore(add(chunk, add(0x08, 681)), 0x79) - mstore(add(chunk, add(0x08, 673)), 0x79) - mstore(add(chunk, add(0x08, 605)), 0x79) - mstore(add(chunk, add(0x08, 537)), 0x79) - mstore(add(chunk, add(0x08, 469)), 0x79) - mstore(add(chunk, add(0x08, 401)), 0x78) - mstore(add(chunk, add(0x08, 333)), 0x78) - mstore(add(chunk, add(0x08, 265)), 0x78) - mstore(add(chunk, add(0x08, 197)), 0x77) - mstore(add(chunk, add(0x08, 129)), 0x77) - mstore(add(chunk, add(0x08, 61)), 0x77) + chunk = new bytes( + 1 + + 40 + + 3 * + 68 + // 3 blocks + 19 * + 4 + + 19 * + 512 // 19 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 3)) // numBlocks = 3 } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 7)) // numTransactions = 7 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 3)) // numL1Messages = 3 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x77)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 9)) // numTransactions = 9 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x77)) // 
lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 13)) // numTransactions = 13 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 5)) // numL1Messages = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x77)) // lastAppliedL1Block + } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 19) { + + } { + mstore(offsetPtr, shl(224, 512)) // 4 bites size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, shl(192, 0x77)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + mstore(offsetPtr, 0xa8cf448633ba639062b57859aec34b05e57c0f4774451fba0aba365865c785e2) // l1 block range hash in chunk + } + chunks[0] = chunk; // Chunk 2 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] = bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 0xf728ee46cf8d439a3e1a29a616dbc22adc899100c431020eee928abf5d3c4680) - mstore(add(chunk, add(0x08, 681)), 0x7C) - mstore(add(chunk, add(0x08, 673)), 0x7C) - mstore(add(chunk, add(0x08, 605)), 0x7C) - mstore(add(chunk, add(0x08, 537)), 0x7C) - mstore(add(chunk, add(0x08, 469)), 0x7C) - mstore(add(chunk, add(0x08, 401)), 0x7B) - mstore(add(chunk, add(0x08, 333)), 0x7B) - mstore(add(chunk, add(0x08, 265)), 0x7B) - mstore(add(chunk, add(0x08, 197)), 0x7A) - mstore(add(chunk, add(0x08, 129)), 0x7A) - mstore(add(chunk, add(0x08, 61)), 0x7A) + chunk = new bytes( + 1 + + 40 + + 8 * + 68 + // 8 blocks + 44 * + 4 + + 44 * + 512 // 40 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 8)) // numBlocks = 8 + } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 22)) // numTransactions = 22 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 10)) // numL1Messages = 10 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x78)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 5)) // numTransactions = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 1)) // numL1Messages = 1 + 
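            // The per-block walk of 56 + 2 + 2 + 8 bytes used for each block
            // here matches the 68-byte block context (zeroed blockNumber,
            // timestamp, baseFee and gasLimit, then numTransactions,
            // numL1Messages and the block's lastAppliedL1Block), and every L2
            // tx is a 4-byte big-endian length prefix followed by a 512-byte
            // payload. Hence the allocation 1 + 40 + 8 * 68 + 44 * 4 + 44 * 512
            // above: 1 byte for numBlocks, 68 bytes per block, (4 + 512) bytes
            // per L2 tx, and a 40-byte chunk trailer holding the chunk-level
            // lastAppliedL1Block and l1BlockRangeHash.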
offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x78)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 15)) // numTransactions = 15 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 9)) // numL1Messages = 9 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x78)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 5)) // numTransactions = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 3)) // numL1Messages = 3 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x78)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 12)) // numTransactions = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 12)) // numL1Messages = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x78)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 11)) // numTransactions = 11 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 5)) // numL1Messages = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7A)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 12)) // numTransactions = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 5)) // numL1Messages = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7A)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 12)) // numTransactions = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 5)) // numL1Messages = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7B)) // lastAppliedL1Block } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 44) { + + } { + // 44 Txs + mstore(offsetPtr, shl(224, 512)) // 4 bites size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, shl(192, 0x7B)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + mstore(offsetPtr, 0x092406e276c3b811614283ada503db6bf86fe8e4771920d1691301213c60ad27) // l1 block range hash in chunk + } + chunks[1] = chunk; // Chunk 3 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = 
bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] = bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 0x14df5fa3672808bc122794cc77d8c1c58e334af28adfbeb464341e3e4aa2c5f7) - mstore(add(chunk, add(0x08, 681)), 0x7F) - mstore(add(chunk, add(0x08, 673)), 0x7F) - mstore(add(chunk, add(0x08, 605)), 0x7F) - mstore(add(chunk, add(0x08, 537)), 0x7F) - mstore(add(chunk, add(0x08, 469)), 0x7F) - mstore(add(chunk, add(0x08, 401)), 0x7E) - mstore(add(chunk, add(0x08, 333)), 0x7E) - mstore(add(chunk, add(0x08, 265)), 0x7E) - mstore(add(chunk, add(0x08, 197)), 0x7D) - mstore(add(chunk, add(0x08, 129)), 0x7D) - mstore(add(chunk, add(0x08, 61)), 0x7D) + chunk = new bytes( + 1 + + 40 + + 2 * + 68 + // 2 blocks + 13 * + 4 + + 13 * + 512 // 13 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 2)) // numBlocks = 2 } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 19)) // numTransactions = 19 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 10)) // numL1Messages = 10 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7B)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 5)) // numTransactions = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 1)) // numL1Messages = 1 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7C)) // lastAppliedL1Block + } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 13) { + + } { + // 13 Txs + mstore(offsetPtr, shl(224, 512)) // 4 bites size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, shl(192, 0x7C)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + mstore(offsetPtr, 0xfd65998deda42723bb9b4108c7a015c3ce1c178917dd7c235dec2d68de3e73dd) // l1 block range hash in chunk + } + chunks[2] = chunk; // Chunk 4 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] 
= bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 0x49cf7c8a8a6145fb39ddc498cc2397d27d4760fb055940495ac49012ee5874af) - mstore(add(chunk, add(0x08, 681)), 0x82) - mstore(add(chunk, add(0x08, 673)), 0x82) - mstore(add(chunk, add(0x08, 605)), 0x82) - mstore(add(chunk, add(0x08, 537)), 0x82) - mstore(add(chunk, add(0x08, 469)), 0x82) - mstore(add(chunk, add(0x08, 401)), 0x81) - mstore(add(chunk, add(0x08, 333)), 0x81) - mstore(add(chunk, add(0x08, 265)), 0x81) - mstore(add(chunk, add(0x08, 197)), 0x80) - mstore(add(chunk, add(0x08, 129)), 0x80) - mstore(add(chunk, add(0x08, 61)), 0x80) + chunk = new bytes( + 1 + + 40 + + 3 * + 68 + // 3 blocks + 20 * + 4 + + 20 * + 512 // 20 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 3)) // numBlocks = 3 + } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 12)) // numTransactions = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7C)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 7)) // numTransactions = 7 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7D)) // lastAppliedL1Block } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 7)) // numTransactions = 7 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7D)) // lastAppliedL1Block + } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 20) { + + } { + // 20 Txs + mstore(offsetPtr, shl(224, 512)) // 4 bites size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, shl(192, 0x7D)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + mstore(offsetPtr, 0x39e5371562db15cae477e227498ea2b8caecdd60d087341449f8b46a102f0af7) // l1 block range hash in chunk + } + chunks[3] = chunk; // Chunk 5 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - 
chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] = bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 0x6b2ab6533745754097cef3c67000c06eddc2e0a640cb3f6c25e8be8b229abe21) - mstore(add(chunk, add(0x08, 681)), 0x85) - mstore(add(chunk, add(0x08, 673)), 0x85) - mstore(add(chunk, add(0x08, 605)), 0x85) - mstore(add(chunk, add(0x08, 537)), 0x85) - mstore(add(chunk, add(0x08, 469)), 0x85) - mstore(add(chunk, add(0x08, 401)), 0x84) - mstore(add(chunk, add(0x08, 333)), 0x84) - mstore(add(chunk, add(0x08, 265)), 0x84) - mstore(add(chunk, add(0x08, 197)), 0x83) - mstore(add(chunk, add(0x08, 129)), 0x83) - mstore(add(chunk, add(0x08, 61)), 0x83) + chunk = new bytes( + 1 + + 40 + + 2 * + 68 + // 2 blocks + 12 * + 4 + + 12 * + 512 // 12 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 2)) // numBlocks = 2 + } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 8)) // numTransactions = 8 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7E)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 8)) // numTransactions = 8 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7E)) // lastAppliedL1Block } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 12) { + + } { + // 20 Txs + mstore(offsetPtr, shl(224, 512)) // 4 bites size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, shl(192, 0x7E)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + mstore(offsetPtr, 0x3a20462b836d7e6b2754037a9f47405d03eb3e3c7af98c0b3b05c4d64542bb45) // l1 block range hash in chunk + } + chunks[4] = chunk; // Chunk 6 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] = bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 
0x4b3e0db23a327c1a25465bd90718d53aa171156c568ea29301b7af3cd88839fe) - mstore(add(chunk, add(0x08, 681)), 0x88) - mstore(add(chunk, add(0x08, 673)), 0x88) - mstore(add(chunk, add(0x08, 605)), 0x88) - mstore(add(chunk, add(0x08, 537)), 0x88) - mstore(add(chunk, add(0x08, 469)), 0x88) - mstore(add(chunk, add(0x08, 401)), 0x87) - mstore(add(chunk, add(0x08, 333)), 0x87) - mstore(add(chunk, add(0x08, 265)), 0x87) - mstore(add(chunk, add(0x08, 197)), 0x86) - mstore(add(chunk, add(0x08, 129)), 0x86) - mstore(add(chunk, add(0x08, 61)), 0x86) + chunk = new bytes( + 1 + + 40 + + 3 * + 68 + // 3 blocks + 19 * + 4 + + 19 * + 512 // 19 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 3)) // numBlocks = 3 } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 7)) // numTransactions = 7 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 3)) // numL1Messages = 3 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7F)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 9)) // numTransactions = 9 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x7F)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 13)) // numTransactions = 13 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 5)) // numL1Messages = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x80)) // lastAppliedL1Block + } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 19) { + + } { + mstore(offsetPtr, shl(224, 512)) // 4 bites size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, shl(192, 0x80)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + mstore(offsetPtr, 0xff733928d2850747c2af8eeda384b947ced90ba76058a78bec5f6966d34b263c) // l1 block range hash in chunk + } + chunks[5] = chunk; // Chunk 7 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] = bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // 
numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 0x912c42617851778491ad05827e4436c0b19d1df5fb16295554f01600b5005300) - mstore(add(chunk, add(0x08, 681)), 0x8B) - mstore(add(chunk, add(0x08, 673)), 0x8B) - mstore(add(chunk, add(0x08, 605)), 0x8B) - mstore(add(chunk, add(0x08, 537)), 0x8B) - mstore(add(chunk, add(0x08, 469)), 0x8B) - mstore(add(chunk, add(0x08, 401)), 0x8A) - mstore(add(chunk, add(0x08, 333)), 0x8A) - mstore(add(chunk, add(0x08, 265)), 0x8A) - mstore(add(chunk, add(0x08, 197)), 0x89) - mstore(add(chunk, add(0x08, 129)), 0x89) - mstore(add(chunk, add(0x08, 61)), 0x89) + chunk = new bytes( + 1 + + 40 + + 8 * + 68 + // 8 blocks + 44 * + 4 + + 44 * + 512 // 40 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 8)) // numBlocks = 8 + } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 22)) // numTransactions = 22 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 10)) // numL1Messages = 10 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x80)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 5)) // numTransactions = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 1)) // numL1Messages = 1 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x80)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 15)) // numTransactions = 15 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 9)) // numL1Messages = 9 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x82)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 5)) // numTransactions = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 3)) // numL1Messages = 3 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x82)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 12)) // numTransactions = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 12)) // numL1Messages = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x83)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 11)) // numTransactions = 11 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 5)) // numL1Messages = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x83)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 12)) // numTransactions = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 5)) // numL1Messages = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x83)) // lastAppliedL1Block } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 12)) // numTransactions = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 5)) // numL1Messages = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x84)) // lastAppliedL1Block + } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 44) { + + } { + // 44 Txs + mstore(offsetPtr, shl(224, 512)) // 4 bites 
size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, shl(192, 0x84)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + mstore(offsetPtr, 0xd499ae9fd9156c709dba0db48923020966a142d871cbe6c4844227783614d9a3) // l1 block range hash in chunk + } + chunks[6] = chunk; // Chunk 8 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] = bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 0x7609178ce660a0599557566f8e76f57dfe98bbae3268752f3f03f153da817fc2) - mstore(add(chunk, add(0x08, 681)), 0x8F) - mstore(add(chunk, add(0x08, 673)), 0x8F) - mstore(add(chunk, add(0x08, 605)), 0x8F) - mstore(add(chunk, add(0x08, 537)), 0x8F) - mstore(add(chunk, add(0x08, 469)), 0x8F) - mstore(add(chunk, add(0x08, 401)), 0x8E) - mstore(add(chunk, add(0x08, 333)), 0x8E) - mstore(add(chunk, add(0x08, 265)), 0x8E) - mstore(add(chunk, add(0x08, 197)), 0x8D) - mstore(add(chunk, add(0x08, 129)), 0x8D) - mstore(add(chunk, add(0x08, 61)), 0x8D) + chunk = new bytes( + 1 + + 40 + + 2 * + 68 + // 2 blocks + 13 * + 4 + + 13 * + 512 // 13 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 2)) // numBlocks = 2 } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 19)) // numTransactions = 19 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 10)) // numL1Messages = 10 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x85)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 5)) // numTransactions = 5 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 1)) // numL1Messages = 1 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x85)) // lastAppliedL1Block + } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 13) { + + } { + // 13 Txs + mstore(offsetPtr, shl(224, 512)) // 4 bites size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, shl(192, 0x85)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + 
mstore(offsetPtr, 0x9cbf2ce36946ee52114304e2dc20af020d6c27d837e20277f1ce8a6e3019a75b) // l1 block range hash in chunk + } + chunks[7] = chunk; // Chunk 9 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] = bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 0xa23ca349e8c64fdbff880e51ccdba5cf8a200426f30bb25b0e43d2592a444f00) - mstore(add(chunk, add(0x08, 681)), 0x92) - mstore(add(chunk, add(0x08, 673)), 0x92) - mstore(add(chunk, add(0x08, 605)), 0x92) - mstore(add(chunk, add(0x08, 537)), 0x92) - mstore(add(chunk, add(0x08, 469)), 0x92) - mstore(add(chunk, add(0x08, 401)), 0x91) - mstore(add(chunk, add(0x08, 333)), 0x91) - mstore(add(chunk, add(0x08, 265)), 0x91) - mstore(add(chunk, add(0x08, 197)), 0x90) - mstore(add(chunk, add(0x08, 129)), 0x90) - mstore(add(chunk, add(0x08, 61)), 0x90) + chunk = new bytes( + 1 + + 40 + + 3 * + 68 + // 3 blocks + 20 * + 4 + + 20 * + 512 // 20 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 3)) // numBlocks = 3 + } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 12)) // numTransactions = 12 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x85)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 7)) // numTransactions = 7 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x85)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 7)) // numTransactions = 7 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x86)) // lastAppliedL1Block + } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 20) { + + } { + // 20 Txs + mstore(offsetPtr, shl(224, 512)) // 4 bites size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, 
shl(192, 0x86)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + mstore(offsetPtr, 0x638ec9231242321da81ceaa230e9df6e1f9fa53213076d45bf4b9ba16b1c5d22) // l1 block range hash in chunk } + chunks[8] = chunk; // Chunk 10 - chunk = new bytes(1 + 40 + 680); - chunk[0] = bytes1(uint8(10)); // 10 blocks in this chunk - // Block 1 - chunk[58] = bytes1(uint8(2)); // numTransactions = 2 - chunk[60] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 2 - chunk[126] = bytes1(uint8(1)); // numTransactions = 1 - chunk[128] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 3 - chunk[194] = bytes1(uint8(1)); // numTransactions = 1 - chunk[196] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 4 - chunk[262] = bytes1(uint8(2)); // numTransactions = 2 - chunk[264] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 5 - chunk[330] = bytes1(uint8(1)); // numTransactions = 1 - chunk[332] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 6 - chunk[392] = bytes1(uint8(1)); // numTransactions = 1 - chunk[394] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 7 - chunk[454] = bytes1(uint8(2)); // numTransactions = 2 - chunk[456] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 8 - chunk[516] = bytes1(uint8(1)); // numTransactions = 1 - chunk[518] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 9 - chunk[576] = bytes1(uint8(1)); // numTransactions = 1 - chunk[578] = bytes1(uint8(1)); // numL1Messages = 1 - // Block 10 - chunk[640] = bytes1(uint8(1)); // numTransactions = 1 - chunk[642] = bytes1(uint8(1)); // numL1Messages = 1 - - assembly { - mstore(add(chunk, add(0x20, 689)), 0xae419b6fa346fcfe204bcae6d957ba39a40c04dd05f2104b47a83e1587f59bc7) - mstore(add(chunk, add(0x08, 681)), 0x95) - mstore(add(chunk, add(0x08, 673)), 0x95) - mstore(add(chunk, add(0x08, 605)), 0x95) - mstore(add(chunk, add(0x08, 537)), 0x95) - mstore(add(chunk, add(0x08, 469)), 0x95) - mstore(add(chunk, add(0x08, 401)), 0x94) - mstore(add(chunk, add(0x08, 333)), 0x94) - mstore(add(chunk, add(0x08, 265)), 0x94) - mstore(add(chunk, add(0x08, 197)), 0x93) - mstore(add(chunk, add(0x08, 129)), 0x93) - mstore(add(chunk, add(0x08, 61)), 0x93) + chunk = new bytes( + 1 + + 40 + + 2 * + 68 + // 2 blocks + 12 * + 4 + + 12 * + 512 // 12 not L1 Msg Tx + ); + + assembly { + chunkPtr := add(chunk, 0x20) + blockPtr := add(chunk, add(0x20, 1)) + + mstore(chunkPtr, shl(248, 2)) // numBlocks = 2 } + + assembly { + offsetPtr := add(blockPtr, 56) + mstore(offsetPtr, shl(240, 8)) // numTransactions = 8 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x87)) // lastAppliedL1Block + } + + assembly { + blockPtr := add(offsetPtr, 8) + offsetPtr := add(blockPtr, 56) + + mstore(offsetPtr, shl(240, 8)) // numTransactions = 8 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(240, 2)) // numL1Messages = 2 + offsetPtr := add(offsetPtr, 2) + mstore(offsetPtr, shl(192, 0x8A)) // lastAppliedL1Block + } + + assembly { + offsetPtr := add(offsetPtr, 8) + + let i := 0 + for { + + } lt(i, 12) { + + } { + // 20 Txs + mstore(offsetPtr, shl(224, 512)) // 4 bites size of 512 bytes Tx Payload + offsetPtr := add(offsetPtr, 516) // 4 bytes size + 512 bytes Tx Payload + i := add(i, 1) + } + + mstore(offsetPtr, shl(192, 0x8A)) // lastAppliedL1Block in chunk + offsetPtr := add(offsetPtr, 8) + mstore(offsetPtr, 0x9791787170769e70bc0ddac0baeb9991a547873255a58d4bb0223b7437ecb22b) // l1 block range hash in chunk + } + chunks[9] = chunk; - hevm.roll(150); + 
hevm.roll(250); hevm.startPrank(address(0)); - rollup.commitBatch(0, batchHeader0, chunks, new bytes(0), 118); + startMeasuringGas("commitBatch"); + rollup.commitBatch(0, batchHeader0, chunks, new bytes(32), 118); + stopMeasuringGas(); hevm.stopPrank(); assertGt(uint256(rollup.committedBatches(1)), 0); + + hevm.startPrank(address(0)); + startMeasuringGas("finalizeBatchWithProof"); + rollup.finalizeBatchWithProof( + batchHeader1, + bytes32(uint256(1)), + bytes32(uint256(2)), + bytes32(uint256(3)), + new bytes(0) + ); + stopMeasuringGas(); + hevm.stopPrank(); } // commit batch, one chunk with one block, 1 tx, 1 L1 message, no skip, 1 block hash range