Merge branch 'main' into add-codecv3
colinlyguo committed Jun 25, 2024
2 parents 4de8a88 + 202b523 commit 311646b
Showing 4 changed files with 46 additions and 217 deletions.
encoding/codecv1/codecv1.go: 74 changes (4 additions, 70 deletions)
@@ -6,7 +6,6 @@ import (
"encoding/hex"
"errors"
"fmt"
"math"
"math/big"
"strings"
"sync"
@@ -18,6 +17,7 @@ import (
"github.com/scroll-tech/go-ethereum/crypto/kzg4844"

"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/da-codec/encoding/codecv0"
)

// BLSModulus is the BLS modulus defined in EIP-4844.
@@ -27,20 +27,10 @@ var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80...
const MaxNumChunks = 15

// DABlock represents a Data Availability Block.
type DABlock struct {
BlockNumber uint64
Timestamp uint64
BaseFee *big.Int
GasLimit uint64
NumTransactions uint16
NumL1Messages uint16
}
type DABlock = codecv0.DABlock

// DAChunk groups consecutive DABlocks with their transactions.
type DAChunk struct {
Blocks []*DABlock
Transactions [][]*types.TransactionData
}
type DAChunk codecv0.DAChunk

// DABatch contains metadata about a batch of DAChunks.
type DABatch struct {
@@ -61,63 +51,7 @@ type DABatch struct {

// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before.
func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) {
if !block.Header.Number.IsUint64() {
return nil, errors.New("block number is not uint64")
}

// note: numL1Messages includes skipped messages
numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore)
if numL1Messages > math.MaxUint16 {
return nil, errors.New("number of L1 messages exceeds max uint16")
}

// note: numTransactions includes skipped messages
numL2Transactions := block.NumL2Transactions()
numTransactions := numL1Messages + numL2Transactions
if numTransactions > math.MaxUint16 {
return nil, errors.New("number of transactions exceeds max uint16")
}

daBlock := DABlock{
BlockNumber: block.Header.Number.Uint64(),
Timestamp: block.Header.Time,
BaseFee: block.Header.BaseFee,
GasLimit: block.Header.GasLimit,
NumTransactions: uint16(numTransactions),
NumL1Messages: uint16(numL1Messages),
}

return &daBlock, nil
}

// Encode serializes the DABlock into a slice of bytes.
func (b *DABlock) Encode() []byte {
bytes := make([]byte, 60)
binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber)
binary.BigEndian.PutUint64(bytes[8:], b.Timestamp)
if b.BaseFee != nil {
binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64())
}
binary.BigEndian.PutUint64(bytes[48:], b.GasLimit)
binary.BigEndian.PutUint16(bytes[56:], b.NumTransactions)
binary.BigEndian.PutUint16(bytes[58:], b.NumL1Messages)
return bytes
}

// Decode populates the fields of a DABlock from a byte slice.
func (b *DABlock) Decode(bytes []byte) error {
if len(bytes) != 60 {
return errors.New("block encoding is not 60 bytes long")
}

b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8])
b.Timestamp = binary.BigEndian.Uint64(bytes[8:16])
b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48]))
b.GasLimit = binary.BigEndian.Uint64(bytes[48:56])
b.NumTransactions = binary.BigEndian.Uint16(bytes[56:58])
b.NumL1Messages = binary.BigEndian.Uint16(bytes[58:60])

return nil
return codecv0.NewDABlock(block, totalL1MessagePoppedBefore)
}

// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before.
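For reference, the net effect of the codecv1.go changes above is that codecv1 stops redefining its block and chunk types and delegates to codecv0. A minimal consolidated sketch (not part of the diff; the identifiers and package paths are taken from the hunks above):

package codecv1

import (
	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/da-codec/encoding/codecv0"
)

// DABlock is a type alias: codecv0 and codecv1 share a single block type,
// so values move between the packages without conversion.
type DABlock = codecv0.DABlock

// DAChunk is a distinct named type over the same underlying struct,
// which lets codecv1 keep its own chunk-level Encode/Hash methods.
type DAChunk codecv0.DAChunk

// NewDABlock delegates to codecv0, which performs the range checks
// (uint64 block number, uint16 message/transaction counts) removed here.
func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) {
	return codecv0.NewDABlock(block, totalL1MessagePoppedBefore)
}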
encoding/codecv2/codecv2.go: 151 changes (5 additions, 146 deletions)
@@ -12,9 +12,7 @@ import (
"encoding/hex"
"errors"
"fmt"
"math"
"math/big"
"strings"
"unsafe"

"github.com/scroll-tech/go-ethereum/common"
@@ -30,20 +28,10 @@ import (
const MaxNumChunks = 45

// DABlock represents a Data Availability Block.
type DABlock struct {
BlockNumber uint64
Timestamp uint64
BaseFee *big.Int
GasLimit uint64
NumTransactions uint16
NumL1Messages uint16
}
type DABlock = codecv1.DABlock

// DAChunk groups consecutive DABlocks with their transactions.
type DAChunk struct {
Blocks []*DABlock
Transactions [][]*types.TransactionData
}
type DAChunk = codecv1.DAChunk

// DABatch contains metadata about a batch of DAChunks.
type DABatch struct {
@@ -64,133 +52,12 @@ type DABatch struct {

// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before.
func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) {
if !block.Header.Number.IsUint64() {
return nil, errors.New("block number is not uint64")
}

// note: numL1Messages includes skipped messages
numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore)
if numL1Messages > math.MaxUint16 {
return nil, errors.New("number of L1 messages exceeds max uint16")
}

// note: numTransactions includes skipped messages
numL2Transactions := block.NumL2Transactions()
numTransactions := numL1Messages + numL2Transactions
if numTransactions > math.MaxUint16 {
return nil, errors.New("number of transactions exceeds max uint16")
}

daBlock := DABlock{
BlockNumber: block.Header.Number.Uint64(),
Timestamp: block.Header.Time,
BaseFee: block.Header.BaseFee,
GasLimit: block.Header.GasLimit,
NumTransactions: uint16(numTransactions),
NumL1Messages: uint16(numL1Messages),
}

return &daBlock, nil
}

// Encode serializes the DABlock into a slice of bytes.
func (b *DABlock) Encode() []byte {
bytes := make([]byte, 60)
binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber)
binary.BigEndian.PutUint64(bytes[8:], b.Timestamp)
if b.BaseFee != nil {
binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64())
}
binary.BigEndian.PutUint64(bytes[48:], b.GasLimit)
binary.BigEndian.PutUint16(bytes[56:], b.NumTransactions)
binary.BigEndian.PutUint16(bytes[58:], b.NumL1Messages)
return bytes
}

// Decode populates the fields of a DABlock from a byte slice.
func (b *DABlock) Decode(bytes []byte) error {
if len(bytes) != 60 {
return errors.New("block encoding is not 60 bytes long")
}

b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8])
b.Timestamp = binary.BigEndian.Uint64(bytes[8:16])
b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48]))
b.GasLimit = binary.BigEndian.Uint64(bytes[48:56])
b.NumTransactions = binary.BigEndian.Uint16(bytes[56:58])
b.NumL1Messages = binary.BigEndian.Uint16(bytes[58:60])

return nil
return codecv1.NewDABlock(block, totalL1MessagePoppedBefore)
}

// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before.
func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) {
var blocks []*DABlock
var txs [][]*types.TransactionData

for _, block := range chunk.Blocks {
b, err := NewDABlock(block, totalL1MessagePoppedBefore)
if err != nil {
return nil, err
}
blocks = append(blocks, b)
totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore)
txs = append(txs, block.Transactions)
}

daChunk := DAChunk{
Blocks: blocks,
Transactions: txs,
}

return &daChunk, nil
}

// Encode serializes the DAChunk into a slice of bytes.
func (c *DAChunk) Encode() []byte {
var chunkBytes []byte
chunkBytes = append(chunkBytes, byte(len(c.Blocks)))

for _, block := range c.Blocks {
blockBytes := block.Encode()
chunkBytes = append(chunkBytes, blockBytes...)
}

return chunkBytes
}

// Hash computes the hash of the DAChunk data.
func (c *DAChunk) Hash() (common.Hash, error) {
var dataBytes []byte

// concatenate block contexts
for _, block := range c.Blocks {
encodedBlock := block.Encode()
// only the first 58 bytes are used in the hashing process
dataBytes = append(dataBytes, encodedBlock[:58]...)
}

// concatenate l1 tx hashes
for _, blockTxs := range c.Transactions {
for _, txData := range blockTxs {
if txData.Type != types.L1MessageTxType {
continue
}

txHash := strings.TrimPrefix(txData.TxHash, "0x")
hashBytes, err := hex.DecodeString(txHash)
if err != nil {
return common.Hash{}, err
}
if len(hashBytes) != 32 {
return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash)
}
dataBytes = append(dataBytes, hashBytes...)
}
}

hash := crypto.Keccak256Hash(dataBytes)
return hash, nil
return codecv1.NewDAChunk(chunk, totalL1MessagePoppedBefore)
}

// NewDABatch creates a DABatch from the provided encoding.Batch.
@@ -205,7 +72,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
}

// batch data hash
dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
dataHash, err := codecv1.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
if err != nil {
return nil, err
}
@@ -238,14 +105,6 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
return &daBatch, nil
}

// ComputeBatchDataHash computes the data hash of the batch.
// Note: The batch hash and batch data hash are two different hashes,
// the former is used for identifying a batch in the contracts,
// the latter is used in the public input to the provers.
func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
return codecv1.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore)
}

// ConstructBlobPayload constructs the 4844 blob payload.
func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
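The metadata mentioned in the ConstructBlobPayload comment in the last hunk is laid out as described there: a 2-byte num_chunks field followed by a 4-byte size per chunk slot. A hedged sketch of the resulting sizing, assuming one slot is reserved for each of the MaxNumChunks possible chunks:

// Hypothetical illustration, not the library's code: sizing of the blob
// payload metadata described in the comment above.
const maxNumChunks = 45                   // MaxNumChunks in codecv2
const metadataLength = 2 + maxNumChunks*4 // = 182 bytes: num_chunks (2 bytes) + 4 bytes per chunk size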
encoding/codecv2/codecv2_test.go: 36 changes (36 additions, 0 deletions)
@@ -847,6 +847,42 @@ func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) {
assert.Equal(t, uint64(3186), batch5BlobSize)
}

func TestCodecV2ChunkAndBatchCalldataSizeEstimation(t *testing.T) {
trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2)
assert.Equal(t, uint64(60), chunk2CalldataSize)
batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2)
assert.Equal(t, uint64(60), batch2CalldataSize)

trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3)
assert.Equal(t, uint64(60), chunk3CalldataSize)
batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3)
assert.Equal(t, uint64(60), batch3CalldataSize)

trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4)
assert.Equal(t, uint64(60), chunk4CalldataSize)
batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4)
assert.Equal(t, uint64(60), batch4CalldataSize)

chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5)
assert.Equal(t, uint64(120), chunk5CalldataSize)
chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6)
assert.Equal(t, uint64(60), chunk6CalldataSize)
batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5)
assert.Equal(t, uint64(180), batch5CalldataSize)
}

func readBlockFromJSON(t *testing.T, filename string) *encoding.Block {
data, err := os.ReadFile(filename)
assert.NoError(t, err)
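The constants asserted in the new test are consistent with a calldata estimate of 60 bytes per block, matching the 60-byte block-context encoding shown earlier in this diff (transaction payloads travel in the blob, not in calldata). A rough sketch of that arithmetic, using a hypothetical helper name:

// estimateCalldata is a hypothetical helper mirroring what the test asserts:
// one 60-byte encoded block context per block in the chunk or batch.
func estimateCalldata(numBlocks uint64) uint64 {
	const blockContextByteSize = 60 // DABlock.Encode produces 60 bytes
	return numBlocks * blockContextByteSize
}

// chunk2 (1 block)                   -> 60
// chunk5 (trace2 + trace3, 2 blocks) -> 120
// batch5 (chunk5 + chunk6, 3 blocks) -> 180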
encoding/codecv3/codecv3.go: 2 changes (1 addition, 1 deletion)
@@ -66,7 +66,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
}

// batch data hash
dataHash, err := codecv2.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
dataHash, err := codecv1.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
if err != nil {
return nil, err
}
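Related note: because the codecv2.go hunk above removes codecv2's ComputeBatchDataHash wrapper, codecv3 now calls codecv1.ComputeBatchDataHash directly. The computed batch data hash is unchanged, since the removed wrapper only forwarded its arguments to codecv1.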
