From ad42cd93cd11828f7b3cb1671b81fd852413c019 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Thu, 15 Aug 2024 02:57:18 +0800
Subject: [PATCH 01/46] feat: support conditional encode

---
 encoding/codecv2/codecv2.go      |  34 ++++++--
 encoding/codecv2/codecv2_test.go |  20 ++---
 encoding/codecv3/codecv3.go      |  16 ++--
 encoding/codecv3/codecv3_test.go | 136 +++++++++++++++-----------------
 encoding/da.go                   |   2 +-
 5 files changed, 115 insertions(+), 93 deletions(-)

diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go
index 38ef939..ff7667f 100644
--- a/encoding/codecv2/codecv2.go
+++ b/encoding/codecv2/codecv2.go
@@ -89,7 +89,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
+	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no conditional encode */, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
@@ -119,7 +119,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }
 
 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
 	// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
 	metadataLength := 2 + MaxNumChunks*4
 
@@ -184,6 +184,20 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484
 		return nil, common.Hash{}, nil, err
 	}
 
+	if conditionalEncode {
+		encoded := len(blobBytes) < len(batchBytes)
+		if encoded {
+			blobBytes = append([]byte{1}, blobBytes...)
+		} else {
+			blobBytes = append([]byte{0}, batchBytes...)
+		}
+	}
+
+	if len(blobBytes) > 126976 {
+		log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
+		return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size")
+	}
+
 	// Only apply this check when the uncompressed batch data has exceeded 128 KiB.
 	if !useMockTxData && len(batchBytes) > 131072 {
 		// Check compressed data compatibility.
@@ -306,7 +320,7 @@ func (b *DABatch) Blob() *kzg4844.Blob {
 }
 
 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
+func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayload([]*encoding.Chunk{c})
 	if err != nil {
 		return 0, 0, err
@@ -315,11 +329,15 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint6
 	if err != nil {
 		return 0, 0, err
 	}
-	return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
+	blobBytesLen := uint64(len(blobBytes))
+	if conditionalEncode {
+		blobBytesLen += 1
+	}
+	return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
 }
 
 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
+func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) {
 	batchBytes, err := constructBatchPayload(b.Chunks)
 	if err != nil {
 		return 0, 0, err
@@ -328,7 +346,11 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint6
 	if err != nil {
 		return 0, 0, err
 	}
-	return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
+	blobBytesLen := uint64(len(blobBytes))
+	if conditionalEncode {
+		blobBytesLen += 1
+	}
+	return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
 }
 
 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go
index 808c71f..a623f66 100644
--- a/encoding/codecv2/codecv2_test.go
+++ b/encoding/codecv2/codecv2_test.go
@@ -674,7 +674,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) {
 			chunks = append(chunks, chunk)
 		}
 
-		blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */)
+		blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */)
 		require.NoError(t, err)
 		actualZ := hex.EncodeToString(z[:])
 		assert.Equal(t, tc.expectedz, actualZ)
@@ -870,52 +870,52 @@ func TestCodecV2BatchSkipBitmap(t *testing.T) {
 func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) {
 	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
+	chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(412), chunk2BatchBytesSize)
 	assert.Equal(t, uint64(237), chunk2BlobSize)
 
 	batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
+	batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(412), batch2BatchBytesSize)
 	assert.Equal(t, uint64(237), batch2BlobSize)
 
 	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
+	chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5863), chunk3BatchBytesSize)
 	assert.Equal(t, uint64(2933), chunk3BlobSize)
 
 	batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
+	batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(5863), batch3BatchBytesSize)
 	assert.Equal(t, uint64(2933), batch3BlobSize)
 
 	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
+	chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), chunk4BatchBytesSize)
 	assert.Equal(t, uint64(54), chunk4BlobSize)
 
 	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
+	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), blob4BatchBytesSize)
 	assert.Equal(t, uint64(54), batch4BlobSize)
 
 	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
+	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
 	assert.Equal(t, uint64(3149), chunk5BlobSize)
 
 	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
+	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(214), chunk6BatchBytesSize)
 	assert.Equal(t, uint64(54), chunk6BlobSize)
 
 	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
+	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */)
 	assert.NoError(t, err)
 	assert.Equal(t, uint64(6125), batch5BatchBytesSize)
 	assert.Equal(t, uint64(3186), batch5BlobSize)
diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go
index bfe0d2a..8be9694 100644
--- a/encoding/codecv3/codecv3.go
+++ b/encoding/codecv3/codecv3.go
@@ -53,7 +53,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh
 }
 
 // NewDABatch creates a DABatch from the provided encoding.Batch.
-func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
+func NewDABatch(batch *encoding.Batch, conditionalEncode bool) (*DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
 	if len(batch.Chunks) > MaxNumChunks {
 		return nil, errors.New("too many chunks in batch")
 	}
@@ -80,7 +80,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
+	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, conditionalEncode, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
@@ -118,8 +118,8 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }
 
 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
-	return codecv2.ConstructBlobPayload(chunks, useMockTxData)
+func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+	return codecv2.ConstructBlobPayload(chunks, conditionalEncode, useMockTxData)
 }
 
 // NewDABatchFromBytes decodes the given byte slice into a DABatch.
@@ -232,13 +232,13 @@ func (b *DABatch) Blob() *kzg4844.Blob { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c) +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) { + return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c, conditionalEncode) } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b) +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) { + return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b, conditionalEncode) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go index f2eb41a..080f743 100644 --- a/encoding/codecv3/codecv3_test.go +++ b/encoding/codecv3/codecv3_test.go @@ -217,7 +217,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded) @@ -225,7 +225,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded) @@ -233,7 +233,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, 
"030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded) @@ -241,7 +241,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) @@ -249,7 +249,7 @@ func TestCodecV3BatchEncode(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) @@ -257,13 +257,13 @@ func TestCodecV3BatchEncode(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, 
"030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded) @@ -271,7 +271,7 @@ func TestCodecV3BatchEncode(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = hex.EncodeToString(batch.Encode()) assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded) @@ -285,54 +285,54 @@ func TestCodecV3BatchHash(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2", batch.Hash().Hex()) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac", batch.Hash().Hex()) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4", batch.Hash().Hex()) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49", batch.Hash().Hex()) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5", batch.Hash().Hex()) trace7 := 
readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc", batch.Hash().Hex()) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a", batch.Hash().Hex()) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb", batch.Hash().Hex()) } @@ -341,54 +341,54 @@ func TestCodecV3BatchDataHash(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) trace7 := readBlockFromJSON(t, 
"../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) } @@ -397,7 +397,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) @@ -406,7 +406,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) @@ -415,7 +415,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) @@ -425,7 +425,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -434,7 +434,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace6 := readBlockFromJSON(t, 
"../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -443,7 +443,7 @@ func TestCodecV3BatchBlob(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) @@ -451,7 +451,7 @@ func TestCodecV3BatchBlob(t *testing.T) { // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded) @@ -460,7 +460,7 @@ func TestCodecV3BatchBlob(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) @@ -471,55 +471,55 @@ func TestCodecV3BatchChallenge(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:])) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = 
NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:])) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:])) trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:])) chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:])) } @@ -668,7 +668,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) @@ -710,7 +710,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace2}} originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) + batch, err := NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err := batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -719,7 +719,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -728,7 +728,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -737,7 +737,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -746,7 +746,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -755,7 +755,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -763,7 +763,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { // 15 chunks originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) + batch, err = NewDABatch(originalBatch, false /* no conditional encode */) assert.NoError(t, err) verifyData, err = batch.BlobDataProofForPointEvaluation() assert.NoError(t, err) @@ -772,7 +772,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) + 
batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 verifyData, err = batch.BlobDataProofForPointEvaluation()
 assert.NoError(t, err)
@@ -783,7 +783,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
 originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch, err := NewDABatch(originalBatch)
+ batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 0, int(batch.L1MessagePopped))
 assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
@@ -791,7 +791,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch, err = NewDABatch(originalBatch)
+ batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 0, int(batch.L1MessagePopped))
 assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
@@ -799,7 +799,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- batch, err = NewDABatch(originalBatch)
+ batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1
 assert.Equal(t, 11, int(batch.TotalL1MessagePopped))
@@ -807,13 +807,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
- batch, err = NewDABatch(originalBatch)
+ batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5
 assert.Equal(t, 42, int(batch.TotalL1MessagePopped))

 originalBatch.TotalL1MessagePoppedBefore = 37
- batch, err = NewDABatch(originalBatch)
+ batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5
 assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
@@ -821,7 +821,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
- batch, err = NewDABatch(originalBatch)
+ batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3
 assert.Equal(t, 10, int(batch.TotalL1MessagePopped))
@@ -829,13 +829,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
 chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
- batch, err = NewDABatch(originalBatch)
+ batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2
 assert.Equal(t, 257, int(batch.TotalL1MessagePopped))

 originalBatch.TotalL1MessagePoppedBefore = 1
- batch, err = NewDABatch(originalBatch)
+ batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2
 assert.Equal(t, 257, int(batch.TotalL1MessagePopped))
@@ -843,13 +843,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10
 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
- batch, err = NewDABatch(originalBatch)
+ batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 42, int(batch.L1MessagePopped))
 assert.Equal(t, 42, int(batch.TotalL1MessagePopped))

 originalBatch.TotalL1MessagePoppedBefore = 10
- batch, err = NewDABatch(originalBatch)
+ batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, 32, int(batch.L1MessagePopped))
 assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
@@ -858,52 +858,52 @@ func TestCodecV3ChunkAndBatchBlobSizeEstimation(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
- chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
+ chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, uint64(412), chunk2BatchBytesSize)
 assert.Equal(t, uint64(237), chunk2BlobSize)

 batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
+ batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, uint64(412), batch2BatchBytesSize)
 assert.Equal(t, uint64(237), batch2BlobSize)

 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
- chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
+ chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, uint64(5863), chunk3BatchBytesSize)
 assert.Equal(t, uint64(2933), chunk3BlobSize)

 batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
+ batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, uint64(5863), batch3BatchBytesSize)
 assert.Equal(t, uint64(2933), batch3BlobSize)

 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
- chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
+ chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, uint64(214), chunk4BatchBytesSize)
 assert.Equal(t, uint64(54), chunk4BlobSize)

 batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
+ blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, uint64(214), blob4BatchBytesSize)
 assert.Equal(t, uint64(54), batch4BlobSize)

 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
- chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
+ chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
 assert.Equal(t, uint64(3149), chunk5BlobSize)

 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
- chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
+ chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, uint64(214), chunk6BatchBytesSize)
 assert.Equal(t, uint64(54), chunk6BlobSize)

 batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
- batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
+ batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */)
 assert.NoError(t, err)
 assert.Equal(t, uint64(6125), batch5BatchBytesSize)
 assert.Equal(t, uint64(3186), batch5BlobSize)
diff --git a/encoding/da.go b/encoding/da.go
index 73d8b23..35befdb 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -21,7 +21,7 @@ const (
 // CodecV2 represents the version 2 of the encoder and decoder.
 CodecV2

- // CodecV3 represents the version 2 of the encoder and decoder.
+ // CodecV3 represents the version 3 of the encoder and decoder.
 CodecV3
 )

From 62758c888b007774f3d22f2728aa90ce92e3455a Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Thu, 15 Aug 2024 03:16:25 +0800
Subject: [PATCH 02/46] move appending the conditionalEncode flag to after the
 validity check

---
 encoding/codecv2/codecv2.go | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go
index ff7667f..5e4b464 100644
--- a/encoding/codecv2/codecv2.go
+++ b/encoding/codecv2/codecv2.go
@@ -184,6 +184,15 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 return nil, common.Hash{}, nil, err
 }

+ // Only apply this check when the uncompressed batch data has exceeded 128 KiB.
+ if !useMockTxData && len(batchBytes) > 131072 {
+ // Check compressed data compatibility.
+ if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
+ log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
+ return nil, common.Hash{}, nil, err
+ }
+ }
+
 if conditionalEncode {
 encoded := len(blobBytes) < len(batchBytes)
 if encoded {
@@ -198,15 +207,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size")
 }

- // Only apply this check when the uncompressed batch data has exceeded 128 KiB.
- if !useMockTxData && len(batchBytes) > 131072 {
- // Check compressed data compatibility.
- if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
- log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
- return nil, common.Hash{}, nil, err
- }
- }
-
 // convert raw data to BLSFieldElements
 blob, err := MakeBlobCanonical(blobBytes)
 if err != nil {

From 6901956c991008919ebfac9ee45c9ff8f370c365 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Sun, 18 Aug 2024 20:28:43 +0800
Subject: [PATCH 03/46] update da-codec

Remove the conditionalEncode parameters from codecv2 and codecv3 again
(a new codecv4 codec is introduced alongside), and use the shared
encoding.BLSModulus instead of per-package copies.

---
 encoding/codecv1/codecv1.go | 5 +-
 encoding/codecv2/codecv2.go | 34 +-
 encoding/codecv2/codecv2_test.go | 28 +-
 encoding/codecv3/codecv3.go | 16 +-
 encoding/codecv3/codecv3_test.go | 144 +++---
 encoding/codecv4/codecv4.go | 534 ++++++++++++++++++++
 encoding/codecv4/codecv4_test.go | 837 +++++++++++++++++++++++++++++++
 encoding/da.go | 7 +
 8 files changed, 1480 insertions(+), 125 deletions(-)
 create mode 100644 encoding/codecv4/codecv4.go
 create mode 100644 encoding/codecv4/codecv4_test.go

diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go
index a6190d4..205f257 100644
--- a/encoding/codecv1/codecv1.go
+++ b/encoding/codecv1/codecv1.go
@@ -20,9 +20,6 @@ import (
 "github.com/scroll-tech/da-codec/encoding/codecv0"
 )

-// BLSModulus is the BLS modulus defined in EIP-4844.
-var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001"))
-
 // MaxNumChunks is the maximum number of chunks that a batch can contain.
 const MaxNumChunks = 15

@@ -280,7 +277,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484
 // compute z = challenge_digest % BLS_MODULUS
 challengeDigest := crypto.Keccak256Hash(challengePreimage)
- pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
+ pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus)
 pointBytes := pointBigInt.Bytes()

 // the challenge point z
diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go
index 5e4b464..3edf328 100644
--- a/encoding/codecv2/codecv2.go
+++ b/encoding/codecv2/codecv2.go
@@ -26,9 +26,6 @@ import (
 "github.com/scroll-tech/da-codec/encoding/codecv1"
 )

-// BLSModulus is the BLS modulus defined in EIP-4844.
-var BLSModulus = codecv1.BLSModulus
-
 // MaxNumChunks is the maximum number of chunks that a batch can contain.
 const MaxNumChunks = 45

@@ -89,7 +86,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 }

 // blob payload
- blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no conditional encode */, false /* no mock */)
+ blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
 if err != nil {
 return nil, err
 }
@@ -119,7 +116,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }

 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
 // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
 metadataLength := 2 + MaxNumChunks*4

@@ -193,15 +190,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 }
 }

- if conditionalEncode {
- encoded := len(blobBytes) < len(batchBytes)
- if encoded {
- blobBytes = append([]byte{1}, blobBytes...)
- } else {
- blobBytes = append([]byte{0}, batchBytes...)
- }
- }
-
 if len(blobBytes) > 126976 {
 log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes))
 return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size")
 }
@@ -225,7 +213,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useM
 // compute z = challenge_digest % BLS_MODULUS
 challengeDigest := crypto.Keccak256Hash(challengePreimage)
- pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
+ pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus)
 pointBytes := pointBigInt.Bytes()

 // the challenge point z
@@ -320,7 +308,7 @@ func (b *DABatch) Blob() *kzg4844.Blob {
 }

 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) {
+func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
 batchBytes, err := constructBatchPayload([]*encoding.Chunk{c})
 if err != nil {
 return 0, 0, err
@@ -329,15 +317,11 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEnc
 if err != nil {
 return 0, 0, err
 }
- blobBytesLen := uint64(len(blobBytes))
- if conditionalEncode {
- blobBytesLen += 1
- }
- return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
+ return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
 }

 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) {
+func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
 batchBytes, err := constructBatchPayload(b.Chunks)
 if err != nil {
 return 0, 0, err
@@ -346,11 +330,7 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEnc
 if err != nil {
 return 0, 0, err
 }
- blobBytesLen := uint64(len(blobBytes))
- if conditionalEncode {
- blobBytesLen += 1
- }
- return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLen), nil
+ return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil
 }

 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
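The conditional-encode envelope being removed from codecv2 above (the diffstat suggests it reappears in the new codecv4) is a one-byte prefix on the blob payload: 1 means the compressed blobBytes follow, 0 means the raw batchBytes follow, and whichever representation is smaller wins. That flag byte is also why the estimator functions above added one byte to blobBytesLen before calling CalculatePaddedBlobSize: the extra byte can tip the payload into the next padded blob size. A minimal Go sketch of the scheme as it appears in the removed lines; the package name and DecodeConditional are illustrative only, not part of this patch:

	package conditionalenc

	// EncodeConditional tags the payload with a flag byte and keeps
	// whichever representation is smaller:
	//   1 -> compressed blob bytes, 0 -> raw batch bytes.
	func EncodeConditional(batchBytes, blobBytes []byte) []byte {
		if len(blobBytes) < len(batchBytes) {
			return append([]byte{1}, blobBytes...)
		}
		return append([]byte{0}, batchBytes...)
	}

	// DecodeConditional (hypothetical inverse, not in this patch) splits
	// the flag byte from the data; callers must check len(envelope) > 0.
	func DecodeConditional(envelope []byte) (compressed bool, data []byte) {
		return envelope[0] == 1, envelope[1:]
	}
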
diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go
index a623f66..3db2fe4 100644
--- a/encoding/codecv2/codecv2_test.go
+++ b/encoding/codecv2/codecv2_test.go
@@ -59,17 +59,17 @@ func TestCodecV2BlockEncode(t *testing.T) {
 encoded = hex.EncodeToString(block.Encode())
 assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)

- // sanity check: v0 and v1 block encodings are identical
+ // sanity check: v0 and v2 block encodings are identical
 for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} {
 blockv0, err := codecv0.NewDABlock(trace, 0)
 assert.NoError(t, err)
 encodedv0 := hex.EncodeToString(blockv0.Encode())

- blockv1, err := NewDABlock(trace, 0)
+ blockv2, err := NewDABlock(trace, 0)
 assert.NoError(t, err)
- encodedv1 := hex.EncodeToString(blockv1.Encode())
+ encodedv2 := hex.EncodeToString(blockv2.Encode())

- assert.Equal(t, encodedv0, encodedv1)
+ assert.Equal(t, encodedv0, encodedv2)
 }
 }

@@ -674,7 +674,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) {
 chunks = append(chunks, chunk)
 }

- blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */)
+ blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */)
 require.NoError(t, err)
 actualZ := hex.EncodeToString(z[:])
 assert.Equal(t, tc.expectedz, actualZ)
@@ -870,52 +870,52 @@ func TestCodecV2BatchSkipBitmap(t *testing.T) {
 func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
- chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */)
+ chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
 assert.NoError(t, err)
 assert.Equal(t, uint64(412), chunk2BatchBytesSize)
 assert.Equal(t, uint64(237), chunk2BlobSize)

 batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */)
+ batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
 assert.NoError(t, err)
 assert.Equal(t, uint64(412), batch2BatchBytesSize)
 assert.Equal(t, uint64(237), batch2BlobSize)

 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
- chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */)
+ chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
 assert.NoError(t, err)
 assert.Equal(t, uint64(5863), chunk3BatchBytesSize)
 assert.Equal(t, uint64(2933), chunk3BlobSize)

 batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */)
+ batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
 assert.NoError(t, err)
 assert.Equal(t, uint64(5863), batch3BatchBytesSize)
 assert.Equal(t, uint64(2933), batch3BlobSize)

 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
- chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */)
+ chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
 assert.NoError(t, err)
 assert.Equal(t, uint64(214), chunk4BatchBytesSize)
 assert.Equal(t, uint64(54), chunk4BlobSize)

 batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */)
+ blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
 assert.NoError(t, err)
 assert.Equal(t, uint64(214), blob4BatchBytesSize)
 assert.Equal(t, uint64(54), batch4BlobSize)

 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
- chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */)
+ chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
 assert.NoError(t, err)
 assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
 assert.Equal(t, uint64(3149), chunk5BlobSize)

 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
- chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */)
+ chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
 assert.NoError(t, err)
 assert.Equal(t, uint64(214), chunk6BatchBytesSize)
 assert.Equal(t, uint64(54), chunk6BlobSize)

 batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
- batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */)
+ batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
 assert.NoError(t, err)
 assert.Equal(t, uint64(6125), batch5BatchBytesSize)
 assert.Equal(t, uint64(3186), batch5BlobSize)
diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go
index 8be9694..bfe0d2a 100644
--- a/encoding/codecv3/codecv3.go
+++ b/encoding/codecv3/codecv3.go
@@ -53,7 +53,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh
 }

 // NewDABatch creates a DABatch from the provided encoding.Batch.
-func NewDABatch(batch *encoding.Batch, conditionalEncode bool) (*DABatch, error) {
+func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 // this encoding can only support a fixed number of chunks per batch
 if len(batch.Chunks) > MaxNumChunks {
 return nil, errors.New("too many chunks in batch")
@@ -80,7 +80,7 @@ func NewDABatch(batch *encoding.Batch, conditionalEncode bool) (*DABatch, error)
 }

 // blob payload
- blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, conditionalEncode, false /* no mock */)
+ blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */)
 if err != nil {
 return nil, err
 }
@@ -118,8 +118,8 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }

 // ConstructBlobPayload constructs the 4844 blob payload.
-func ConstructBlobPayload(chunks []*encoding.Chunk, conditionalEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
- return codecv2.ConstructBlobPayload(chunks, conditionalEncode, useMockTxData)
+func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+ return codecv2.ConstructBlobPayload(chunks, useMockTxData)
 }

 // NewDABatchFromBytes decodes the given byte slice into a DABatch.
@@ -232,13 +232,13 @@ func (b *DABatch) Blob() *kzg4844.Blob {
 }

 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, conditionalEncode bool) (uint64, uint64, error) {
- return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c, conditionalEncode)
+func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
+ return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c)
 }

 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, conditionalEncode bool) (uint64, uint64, error) {
- return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b, conditionalEncode)
+func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
+ return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b)
 }

 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go
index 080f743..0b22312 100644
--- a/encoding/codecv3/codecv3_test.go
+++ b/encoding/codecv3/codecv3_test.go
@@ -59,17 +59,17 @@ func TestCodecV3BlockEncode(t *testing.T) {
 encoded = hex.EncodeToString(block.Encode())
 assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)

- // sanity check: v0 and v1 block encodings are identical
+ // sanity check: v0 and v3 block encodings are identical
 for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} {
 blockv0, err := codecv0.NewDABlock(trace, 0)
 assert.NoError(t, err)
 encodedv0 := hex.EncodeToString(blockv0.Encode())

- blockv1, err := NewDABlock(trace, 0)
+ blockv3, err := NewDABlock(trace, 0)
 assert.NoError(t, err)
- encodedv1 := hex.EncodeToString(blockv1.Encode())
+ encodedv3 := hex.EncodeToString(blockv3.Encode())

- assert.Equal(t, encodedv0, encodedv1)
+ assert.Equal(t, encodedv0, encodedv3)
 }
 }

@@ -217,7 +217,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
 originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err := NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = hex.EncodeToString(batch.Encode())
 assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded)

@@ -225,7 +225,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = hex.EncodeToString(batch.Encode())
 assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded)

@@ -233,7 +233,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = hex.EncodeToString(batch.Encode())
 assert.Equal(t, "030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded)

@@ -241,7 +241,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
 trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = hex.EncodeToString(batch.Encode())
 assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)

@@ -249,7 +249,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
 trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = hex.EncodeToString(batch.Encode())
 assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)

@@ -257,13 +257,13 @@ func TestCodecV3BatchEncode(t *testing.T) {
 trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
 chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = hex.EncodeToString(batch.Encode())
 assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)

 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = hex.EncodeToString(batch.Encode())
 assert.Equal(t, "030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded)

@@ -271,7 +271,7 @@ func TestCodecV3BatchEncode(t *testing.T) {
 chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = hex.EncodeToString(batch.Encode())
 assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded)

@@ -285,54 +285,54 @@ func TestCodecV3BatchHash(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
 originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err := NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2", batch.Hash().Hex())

 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac", batch.Hash().Hex())

 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4", batch.Hash().Hex())

 trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49", batch.Hash().Hex())

 trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5", batch.Hash().Hex())

 trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
 chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc", batch.Hash().Hex())

 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a", batch.Hash().Hex())

 chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb", batch.Hash().Hex())
 }

@@ -341,54 +341,54 @@ func TestCodecV3BatchDataHash(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
 originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err := NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex())

 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex())

 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex())

 trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex())

 trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex())

 trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
 chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex())

 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex())

 chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex())
 }

@@ -397,7 +397,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
 originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err := NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
 assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded)

@@ -406,7 +406,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
 assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded)

@@ -415,7 +415,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
 assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded)

@@ -425,7 +425,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
 trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
 assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)

@@ -434,7 +434,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
 trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
 assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)

@@ -443,7 +443,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
 trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
 chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
 assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)

@@ -451,7 +451,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
 // 15 chunks
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
 assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded)

@@ -460,7 +460,7 @@ func TestCodecV3BatchBlob(t *testing.T) {
 chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
 assert.Equal(t, "0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f00602686858082209390935590841681522054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded)
 }

@@ -471,55 +471,55 @@ func TestCodecV3BatchChallenge(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
 originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err := NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:]))

 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:]))

 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:]))

 trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))

 trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))

 trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
 chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))

 // 15 chunks
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:]))

 chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:]))
 }

@@ -668,7 +668,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) {
 chunks = append(chunks, chunk)
 }

- blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, false /* no conditional encode */, true /* use mock */)
+ blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */)
 require.NoError(t, err)
 actualZ := hex.EncodeToString(z[:])
 assert.Equal(t, tc.expectedz, actualZ)

@@ -710,7 +710,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
 originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err := NewDABatch(originalBatch)
 assert.NoError(t, err)
 verifyData, err := batch.BlobDataProofForPointEvaluation()
 assert.NoError(t, err)

@@ -719,7 +719,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 verifyData, err = batch.BlobDataProofForPointEvaluation()
 assert.NoError(t, err)

@@ -728,7 +728,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 verifyData, err = batch.BlobDataProofForPointEvaluation()
 assert.NoError(t, err)

@@ -737,7 +737,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
 trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 verifyData, err = batch.BlobDataProofForPointEvaluation()
 assert.NoError(t, err)

@@ -746,7 +746,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
 trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 verifyData, err = batch.BlobDataProofForPointEvaluation()
 assert.NoError(t, err)

@@ -755,7 +755,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
 trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
 chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 verifyData, err = batch.BlobDataProofForPointEvaluation()
 assert.NoError(t, err)

@@ -763,7 +763,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
 // 15 chunks
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 verifyData, err = batch.BlobDataProofForPointEvaluation()
 assert.NoError(t, err)

@@ -772,7 +772,7 @@ func TestCodecV3BatchBlobDataProof(t *testing.T) {
 chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 verifyData, err = batch.BlobDataProofForPointEvaluation()
 assert.NoError(t, err)

@@ -783,7 +783,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
 chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
 originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
- batch, err := NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err := NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, 0, int(batch.L1MessagePopped))
 assert.Equal(t, 0, int(batch.TotalL1MessagePopped))

@@ -791,7 +791,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
 chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, 0, int(batch.L1MessagePopped))
 assert.Equal(t, 0, int(batch.TotalL1MessagePopped))

@@ -799,7 +799,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
 chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1
 assert.Equal(t, 11, int(batch.TotalL1MessagePopped))

@@ -807,13 +807,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
 chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5
 assert.Equal(t, 42, int(batch.TotalL1MessagePopped))

 originalBatch.TotalL1MessagePoppedBefore = 37
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5
 assert.Equal(t, 42, int(batch.TotalL1MessagePopped))

@@ -821,7 +821,7 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
 chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3
 assert.Equal(t, 10, int(batch.TotalL1MessagePopped))

@@ -829,13 +829,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) {
 trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
 chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
- batch, err = NewDABatch(originalBatch, false /* no conditional encode */)
+ batch, err = NewDABatch(originalBatch)
 assert.NoError(t, err)
 assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip
255, include 2 assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) @@ -843,13 +843,13 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 42, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch, false /* no conditional encode */) + batch, err = NewDABatch(originalBatch) assert.NoError(t, err) assert.Equal(t, 32, int(batch.L1MessagePopped)) assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) @@ -858,52 +858,52 @@ func TestCodecV3BatchL1MessagePopped(t *testing.T) { func TestCodecV3ChunkAndBatchBlobSizeEstimation(t *testing.T) { trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, false /* no conditional encode */) + chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) assert.NoError(t, err) assert.Equal(t, uint64(412), chunk2BatchBytesSize) assert.Equal(t, uint64(237), chunk2BlobSize) batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, false /* no conditional encode */) + batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) assert.NoError(t, err) assert.Equal(t, uint64(412), batch2BatchBytesSize) assert.Equal(t, uint64(237), batch2BlobSize) trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, false /* no conditional encode */) + chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) assert.NoError(t, err) assert.Equal(t, uint64(5863), chunk3BatchBytesSize) assert.Equal(t, uint64(2933), chunk3BlobSize) batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, false /* no conditional encode */) + batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) assert.NoError(t, err) assert.Equal(t, uint64(5863), batch3BatchBytesSize) assert.Equal(t, uint64(2933), batch3BlobSize) trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, false /* no conditional encode */) + chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) assert.NoError(t, err) assert.Equal(t, uint64(214), 
chunk4BatchBytesSize) assert.Equal(t, uint64(54), chunk4BlobSize) batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, false /* no conditional encode */) + blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) assert.NoError(t, err) assert.Equal(t, uint64(214), blob4BatchBytesSize) assert.Equal(t, uint64(54), batch4BlobSize) chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, false /* no conditional encode */) + chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) assert.NoError(t, err) assert.Equal(t, uint64(6093), chunk5BatchBytesSize) assert.Equal(t, uint64(3149), chunk5BlobSize) chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, false /* no conditional encode */) + chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) assert.NoError(t, err) assert.Equal(t, uint64(214), chunk6BatchBytesSize) assert.Equal(t, uint64(54), chunk6BlobSize) batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, false /* no conditional encode */) + batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) assert.NoError(t, err) assert.Equal(t, uint64(6125), batch5BatchBytesSize) assert.Equal(t, uint64(3186), batch5BlobSize) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go new file mode 100644 index 0000000..9d16ef2 --- /dev/null +++ b/encoding/codecv4/codecv4.go @@ -0,0 +1,534 @@ +package codecv4 + +/* +#include +char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); +*/ +import "C" + +import ( + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + "math/big" + "unsafe" + + "github.com/scroll-tech/go-ethereum/accounts/abi" + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/log" + + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/da-codec/encoding/codecv1" + "github.com/scroll-tech/da-codec/encoding/codecv3" +) + +// MaxNumChunks is the maximum number of chunks that a batch can contain. +const MaxNumChunks = codecv3.MaxNumChunks + +// DABlock represents a Data Availability Block. +type DABlock = codecv3.DABlock + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunk = codecv3.DAChunk + +// DABatch contains metadata about a batch of DAChunks. 
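+// The header serializes to a fixed 193 bytes (see Encode below):
+//   version (1 byte) || batch_index (8) || l1_message_popped (8) || total_l1_message_popped (8) ||
+//   data_hash (32) || blob_versioned_hash (32) || parent_batch_hash (32) ||
+//   last_block_timestamp (8) || blob_data_proof (2 x 32)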
+type DABatch struct {
+	// header
+	Version              uint8          `json:"version"`
+	BatchIndex           uint64         `json:"batch_index"`
+	L1MessagePopped      uint64         `json:"l1_message_popped"`
+	TotalL1MessagePopped uint64         `json:"total_l1_message_popped"`
+	DataHash             common.Hash    `json:"data_hash"`
+	BlobVersionedHash    common.Hash    `json:"blob_versioned_hash"`
+	ParentBatchHash      common.Hash    `json:"parent_batch_hash"`
+	LastBlockTimestamp   uint64         `json:"last_block_timestamp"`
+	BlobDataProof        [2]common.Hash `json:"blob_data_proof"`
+
+	// blob payload
+	blob *kzg4844.Blob
+	z    *kzg4844.Point
+}
+
+// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before.
+func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) {
+	return codecv3.NewDABlock(block, totalL1MessagePoppedBefore)
+}
+
+// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before.
+func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) {
+	return codecv3.NewDAChunk(chunk, totalL1MessagePoppedBefore)
+}
+
+// NewDABatch creates a DABatch from the provided encoding.Batch.
+func NewDABatch(batch *encoding.Batch, enableEncoding bool) (*DABatch, error) {
+	// this encoding can only support a fixed number of chunks per batch
+	if len(batch.Chunks) > MaxNumChunks {
+		return nil, errors.New("too many chunks in batch")
+	}
+
+	if len(batch.Chunks) == 0 {
+		return nil, errors.New("too few chunks in batch")
+	}
+
+	if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 {
+		return nil, errors.New("too few blocks in last chunk of the batch")
+	}
+
+	// batch data hash
+	dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	if err != nil {
+		return nil, err
+	}
+
+	// skipped L1 messages bitmap
+	_, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	if err != nil {
+		return nil, err
+	}
+
+	// blob payload
+	blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncoding, false /* no mock */)
+	if err != nil {
+		return nil, err
+	}
+
+	lastChunk := batch.Chunks[len(batch.Chunks)-1]
+	lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1]
+
+	daBatch := DABatch{
+		Version:              uint8(encoding.CodecV4),
+		BatchIndex:           batch.Index,
+		L1MessagePopped:      totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore,
+		TotalL1MessagePopped: totalL1MessagePoppedAfter,
+		DataHash:             dataHash,
+		BlobVersionedHash:    blobVersionedHash,
+		ParentBatchHash:      batch.ParentBatchHash,
+		LastBlockTimestamp:   lastBlock.Header.Time,
+		blob:                 blob,
+		z:                    z,
+	}
+
+	daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit()
+	if err != nil {
+		return nil, err
+	}
+
+	return &daBatch, nil
+}
+
+// ComputeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	return codecv3.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+}
+
+// ConstructBlobPayload constructs the 4844 blob payload.
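+// The uncompressed payload starts with a metadata section: num_chunks (2 bytes) followed by a
+// 4-byte size per chunk slot up to MaxNumChunks, then the RLP-encoded L2 transactions of every
+// chunk in order. When enableEncoding is true, the payload is compressed with
+// compressScrollBatchBytes and prefixed with the flag byte 0x01; otherwise the raw payload is
+// prefixed with 0x00.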
+func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncoding bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
+	// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
+	metadataLength := 2 + MaxNumChunks*4
+
+	// batchBytes represents the raw (un-compressed and un-padded) blob payload
+	batchBytes := make([]byte, metadataLength)
+
+	// challenge digest preimage
+	// 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash
+	challengePreimage := make([]byte, (1+MaxNumChunks+1)*32)
+
+	// the chunk data hash used for calculating the challenge preimage
+	var chunkDataHash common.Hash
+
+	// blob metadata: num_chunks
+	binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks)))
+
+	// encode blob metadata and L2 transactions,
+	// and simultaneously also build challenge preimage
+	for chunkID, chunk := range chunks {
+		currentChunkStartIndex := len(batchBytes)
+
+		for _, block := range chunk.Blocks {
+			for _, tx := range block.Transactions {
+				if tx.Type == types.L1MessageTxType {
+					continue
+				}
+
+				// encode L2 txs into blob payload
+				rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData)
+				if err != nil {
+					return nil, common.Hash{}, nil, err
+				}
+				batchBytes = append(batchBytes, rlpTxData...)
+			}
+		}
+
+		// blob metadata: chunki_size
+		if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 {
+			binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize))
+		}
+
+		// challenge: compute chunk data hash
+		chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:])
+		copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
+	}
+
+	// if we have fewer than MaxNumChunks chunks, the rest
+	// of the blob metadata is correctly initialized to 0,
+	// but we need to add padding to the challenge preimage
+	for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ {
+		// use the last chunk's data hash as padding
+		copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
+	}
+
+	// challenge: compute metadata hash
+	hash := crypto.Keccak256Hash(batchBytes[0:metadataLength])
+	copy(challengePreimage[0:], hash[:])
+
+	var blobBytes []byte
+	if enableEncoding {
+		// blobBytes represents the compressed blob payload (batchBytes)
+		var err error
+		blobBytes, err = compressScrollBatchBytes(batchBytes)
+		if err != nil {
+			return nil, common.Hash{}, nil, err
+		}
+		if !useMockTxData {
+			// Check compressed data compatibility.
+			if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
+				log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes))
+				return nil, common.Hash{}, nil, err
+			}
+		}
+		blobBytes = append([]byte{1}, blobBytes...)
+	} else {
+		blobBytes = append([]byte{0}, batchBytes...)
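+
+		// Either branch leaves the conditional-encode flag in blobBytes[0]: 0x01 means the rest
+		// of the payload is compressed, 0x00 means it is the raw batch bytes, so a decoder can
+		// branch on this byte and strip it (payload := blobBytes[1:]) before parsing.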
+ } + + if len(blobBytes) > 126976 { + log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + } + + // convert raw data to BLSFieldElements + blob, err := MakeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, err + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + // challenge: append blob versioned hash + copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) + + // compute z = challenge_digest % BLS_MODULUS + challengeDigest := crypto.Keccak256Hash(challengePreimage) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBytes := pointBigInt.Bytes() + + // the challenge point z + var z kzg4844.Point + start := 32 - len(pointBytes) + copy(z[start:], pointBytes) + + return blob, blobVersionedHash, &z, nil +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. +// Note: This function only populates the batch header, it leaves the blob-related fields empty. +func NewDABatchFromBytes(data []byte) (*DABatch, error) { + if len(data) != 193 { + return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) + } + + b := &DABatch{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + BlobVersionedHash: common.BytesToHash(data[57:89]), + ParentBatchHash: common.BytesToHash(data[89:121]), + LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), + BlobDataProof: [2]common.Hash{ + common.BytesToHash(data[129:161]), + common.BytesToHash(data[161:193]), + }, + } + + return b, nil +} + +// Encode serializes the DABatch into bytes. +func (b *DABatch) Encode() []byte { + batchBytes := make([]byte, 193) + batchBytes[0] = b.Version + binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) + binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) + copy(batchBytes[25:57], b.DataHash[:]) + copy(batchBytes[57:89], b.BlobVersionedHash[:]) + copy(batchBytes[89:121], b.ParentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) + copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) + copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *DABatch) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// blobDataProofForPICircuit computes the abi-encoded blob verification data. 
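+// It returns the challenge point z and the claimed evaluation y of the blob at z, each packed
+// into a 32-byte word, which the PI circuit consumes as public inputs.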
+func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { + if b.blob == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") + } + if b.z == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") + } + + _, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of result: + // | z | y | + // |---------|---------| + // | bytes32 | bytes32 | + var result [2]common.Hash + result[0] = common.BytesToHash(b.z[:]) + result[1] = common.BytesToHash(y[:]) + + return result, nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of ``_blobDataProof``: + // | z | y | kzg_commitment | kzg_proof | + // |---------|---------|----------------|-----------| + // | bytes32 | bytes32 | bytes48 | bytes48 | + + values := []interface{}{*b.z, y, commitment, proof} + blobDataProofArgs, err := GetBlobDataProofArgs() + if err != nil { + return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) + } + return blobDataProofArgs.Pack(values...) +} + +// Blob returns the blob of the batch. +func (b *DABatch) Blob() *kzg4844.Blob { + return b.blob +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode bool) (uint64, uint64, error) { + batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + if err != nil { + return 0, 0, err + } + var blobBytesLength uint64 + if enableEncode { + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + blobBytesLength = 1 + uint64(len(blobBytes)) + } else { + blobBytesLength = 1 + uint64(len(batchBytes)) + } + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableEncode bool) (uint64, uint64, error) { + batchBytes, err := constructBatchPayload(b.Chunks) + if err != nil { + return 0, 0, err + } + var blobBytesLength uint64 + if enableEncode { + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + blobBytesLength = 1 + uint64(len(blobBytes)) + } else { + blobBytesLength = 1 + uint64(len(batchBytes)) + } + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
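+// A (false, nil) result means compression itself succeeded but the compressed bytes failed the
+// compatibility check, so a caller can fall back to committing the chunk with encoding disabled.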
+func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { + batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + if err != nil { + return false, err + } + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := constructBatchPayload(b.Chunks) + if err != nil { + return false, err + } + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. +func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { + return codecv3.EstimateChunkL1CommitCalldataSize(c) +} + +// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. +func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { + return codecv3.EstimateBatchL1CommitCalldataSize(b) +} + +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. +func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { + return codecv3.EstimateChunkL1CommitGas(c) +} + +// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. +func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { + return codecv3.EstimateBatchL1CommitGas(b) +} + +// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. +func GetBlobDataProofArgs() (*abi.Arguments, error) { + return codecv3.GetBlobDataProofArgs() +} + +// checkBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. +func checkBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := constructBatchPayload(b.Chunks) + if err != nil { + return false, err + } + blobBytes, err := compressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// constructBatchPayload constructs the batch payload. +// This function is only used in compressed batch payload length estimation. 
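+// It emits the same num_chunks || chunk sizes || RLP-encoded L2 transactions layout as
+// ConstructBlobPayload, but skips the challenge-preimage bookkeeping, keeping the estimation
+// path cheap.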
+func constructBatchPayload(chunks []*encoding.Chunk) ([]byte, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + MaxNumChunks*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // batch metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode batch metadata and L2 transactions, + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into batch payload + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) + if err != nil { + return nil, err + } + batchBytes = append(batchBytes, rlpTxData...) + } + } + + // batch metadata: chunki_size + if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { + binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) + } + } + return batchBytes, nil +} + +// compressScrollBatchBytes compresses the given batch of bytes. +// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. +func compressScrollBatchBytes(batchBytes []byte) ([]byte, error) { + srcSize := C.uint64_t(len(batchBytes)) + outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes + outbuf := make([]byte, outbufSize) + + if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, + (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { + return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) + } + + return outbuf[:int(outbufSize)], nil +} + +// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. +func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { + return codecv1.MakeBlobCanonical(blobBytes) +} + +// CalculatePaddedBlobSize calculates the required size on blob storage +// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. 
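+// The sizes asserted in the tests below are consistent with
+//   paddedSize = (dataSize/31)*32, plus (1 + dataSize%31) if dataSize is not a multiple of 31,
+// i.e. the trailing partial 32-byte word is not rounded up (for example, a 230-byte encoded
+// payload pads to 238 bytes).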
+func CalculatePaddedBlobSize(dataSize uint64) uint64 { + return codecv1.CalculatePaddedBlobSize(dataSize) +} diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go new file mode 100644 index 0000000..7faf096 --- /dev/null +++ b/encoding/codecv4/codecv4_test.go @@ -0,0 +1,837 @@ +package codecv4 + +import ( + "encoding/hex" + "encoding/json" + "os" + "testing" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/da-codec/encoding/codecv0" +) + +func TestCodecV4BlockEncode(t *testing.T) { + block := &DABlock{} + encoded := hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + block, err := NewDABlock(trace2, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + block, err = NewDABlock(trace3, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + block, err = NewDABlock(trace4, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + block, err = NewDABlock(trace5, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + block, err = NewDABlock(trace6, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + block, err = NewDABlock(trace7, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(block.Encode()) + assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) + + // sanity check: v0 and v4 block encodings are identical + for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { + blockv0, err := codecv0.NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv0 := hex.EncodeToString(blockv0.Encode()) + + blockv4, err := NewDABlock(trace, 0) + assert.NoError(t, err) + encodedv4 := hex.EncodeToString(blockv4.Encode()) + + assert.Equal(t, encodedv0, encodedv4) + } +} + +func TestCodecV4ChunkEncode(t *testing.T) { + // chunk with a single empty block + block := 
DABlock{} + chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} + encoded := hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + // transactions are not part of the encoding + chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err := NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + encoded = hex.EncodeToString(chunk.Encode()) + assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) +} + +func TestCodecV4ChunkHash(t *testing.T) { + // chunk with a single empty block + block := DABlock{} + chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} + hash, err := chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) + + // L1 transactions are part of the hash + 
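+	// (only the L1 message transaction appended below feeds the chunk hash; the L2 transaction
+	// and the NumL1Messages mutation that follow leave it unchanged)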
chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // L2 transactions are not part of the hash + chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // numL1Messages are not part of the hash + chunk.Blocks[0].NumL1Messages = 1 + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) + + // invalid hash + chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) + _, err = chunk.Hash() + assert.Error(t, err) + + trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) + + trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") + originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} + chunk, err = NewDAChunk(originalChunk, 0) + assert.NoError(t, err) + hash, err = chunk.Hash() + assert.NoError(t, err) + assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) +} + +func TestCodecV4BatchEncode(t *testing.T) { + // empty batch + batch := &DABatch{Version: uint8(encoding.CodecV4)} + encoded := hex.EncodeToString(batch.Encode()) + assert.Equal(t, 
"04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) + + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch, err := NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + chunk6 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace6}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) + + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) + + chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} + chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + encoded = hex.EncodeToString(batch.Encode()) + assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) +} + +func TestCodecV4BatchHash(t *testing.T) { + // empty batch + batch := &DABatch{Version: uint8(encoding.CodecV4)} + assert.Equal(t, "0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c", batch.Hash().Hex()) + + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch, err := NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c", batch.Hash().Hex()) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := 
&encoding.Chunk{Blocks: []*encoding.Block{trace3}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857", batch.Hash().Hex()) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d", batch.Hash().Hex()) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x2c32177c8b4c6289d977361c7fd0f1a6ea15add64da2eb8caf0420ac9b35231e", batch.Hash().Hex()) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x909bebbebdbf5ba9c85c6894e839c0b044d2878c457c4942887e3d64469ad342", batch.Hash().Hex()) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, "0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117", batch.Hash().Hex()) + + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a", batch.Hash().Hex()) + + chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} + chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, "0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44", batch.Hash().Hex()) +} + +func TestCodecV4ChunkAndBatchCommitGasEstimation(t *testing.T) { + block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} + chunk2Gas := EstimateChunkL1CommitGas(chunk2) + assert.Equal(t, uint64(51124), chunk2Gas) + batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch2Gas := EstimateBatchL1CommitGas(batch2) + assert.Equal(t, uint64(207649), batch2Gas) + + block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} + chunk3Gas := EstimateChunkL1CommitGas(chunk3) + assert.Equal(t, uint64(51124), chunk3Gas) + batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch3Gas := EstimateBatchL1CommitGas(batch3) + assert.Equal(t, uint64(207649), batch3Gas) + + block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := 
&encoding.Chunk{Blocks: []*encoding.Block{block4}} + chunk4Gas := EstimateChunkL1CommitGas(chunk4) + assert.Equal(t, uint64(53745), chunk4Gas) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch4Gas := EstimateBatchL1CommitGas(batch4) + assert.Equal(t, uint64(210302), batch4Gas) + + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} + chunk5Gas := EstimateChunkL1CommitGas(chunk5) + assert.Equal(t, uint64(52202), chunk5Gas) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} + chunk6Gas := EstimateChunkL1CommitGas(chunk6) + assert.Equal(t, uint64(53745), chunk6Gas) + batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} + batch5Gas := EstimateBatchL1CommitGas(batch5) + assert.Equal(t, uint64(213087), batch5Gas) +} + +func repeat(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) +} + +func TestCodecV4BatchStandardTestCases(t *testing.T) { + // Taking into consideration compression, we allow up to 5x of max blob bytes. + // We then ignore the metadata rows for 45 chunks. + maxChunks := 45 + nRowsData := 5*126976 - (maxChunks*4 + 2) + + for _, tc := range []struct { + chunks [][]string + expectedz string + expectedy string + expectedBlobVersionedHash string + expectedBatchHash string + }{ + // single empty chunk + {chunks: [][]string{{}}, expectedz: "1517a7f04a9f2517aaad8440792de202bd1fef70a861e12134c882ccf0c5a537", expectedy: "1ff0c5ea938308566ab022bc30d0136792084dc9adca93612ec925411915d4a9", expectedBlobVersionedHash: "015f16731c3e7864a08edae95f11db8c96e39a487427d7e58b691745d87f8a21", expectedBatchHash: "c3cfeead404a6de1ec5feaa29b6c1c1a5e6a40671c5d5e9cf1dd86fdf5a2e44a"}, + // single non-empty chunk + {chunks: [][]string{{"0x010203"}}, expectedz: "2cbd5fb174611060e72a2afcc385cea273b0f5ea8656f04f3661d757a6b00ff9", expectedy: "68d653e973d32fc5b79763d1b7de1699f37e2527830331b1a02f39d58d7070a9", expectedBlobVersionedHash: "019de38b4472451c5e8891dbb01bc2e834d660198cb9878e6b94fb55e4aaf92b", expectedBatchHash: "41e1c4a5220feb7fed5ba9e3980d138b8d5b4b06b8a46a87d796dbf5ed9265f5"}, + // multiple empty chunks + {chunks: [][]string{{}, {}}, expectedz: "0f9270fd0f21c1eef46334614c586759a2fb71ae46fef50560e92ef7ec926ccc", expectedy: "028f18fc74210d214d3e78a5f92f5c68a9d4dcc633e6e7ffb4144651a39b9dce", expectedBlobVersionedHash: "014a46e5be597971d313e300a052dc406b9f06fad394e1ba115df7da9ca5746d", expectedBatchHash: "94cac32609ae6c3d99dacf5af3650a7748b4dcf8c9779353b932a75e85bc2632"}, + // multiple non-empty chunks + {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "3a199bd64627e67c320add8a5932870535c667236eda365c989f0b73176bb000", expectedy: "221d60db4912e9067df77ee3d71587ea1023ec0238c23044a3325f909fd5ceb3", expectedBlobVersionedHash: "0145df6dbf8070bb3137156fe4540c11330e84487fcac24239442859d95e925c", expectedBatchHash: "d2332749a82a3b94766493ee3826074b8af74efc98367d14fd82e1056e2abf88"}, + // empty chunk followed by non-empty chunk + {chunks: [][]string{{}, {"0x010203"}}, expectedz: "0a421d448784eb111c2ae9a8031a7cf79e4638b300c48d0c7ff38322e25268fc", expectedy: "48ad5516b1370ac6be17a1d3220e286c9522366ec36fc66a584bbe1ee904eaf1", expectedBlobVersionedHash: "019e5c4c0bfa68324657a0d2e49075eeee2e7c928811bc9c8b2c03888d9d3a5d", expectedBatchHash: "5eac258323d1a4d166d2d116b330262440f46f1ecf07b247cc792bca4a905761"}, + // non-empty chunk followed by empty chunk + {chunks: [][]string{{"0x070809"}, {}}, expectedz: 
"6aa26c5d595fa1b72c4e1aa4f06b35788060a7504137c7dd6896486819445230", expectedy: "72c082827841ab84576b49cd63bd06af07cb090626ea3e91a8e77de29b3e61dc", expectedBlobVersionedHash: "0166c93797bf7d4e5701d36bfc8bcea5270c1c4ff18d1aaa248125c87746cf3d", expectedBatchHash: "03e0bdf053fa21d37bf55ac27e7774298b95465123c353e30761e51965269a10"}, + // max number of chunks all empty + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4a04cb1860de2c0d03a78520da62a447ef2af92e36dc0b1806db501d7cf63469", expectedy: "17ca30439aed3d9a96f4336d2a416da04a0803667922c7b0765557bb0162493f", expectedBlobVersionedHash: "014b8172c9e2ef89ac8d2ff0c9991baafff3602459250f5870721ac4f05dca09", expectedBatchHash: "216add0492703b12b841ebf6d217a41d1907dd4acd54d07a870472d31d4fde0d"}, + // max number of chunks all non-empty + {chunks: [][]string{ + {"0x0a"}, + {"0x0a0b"}, + {"0x0a0b0c"}, + {"0x0a0b0c0d"}, + {"0x0a0b0c0d0e"}, + {"0x0a0b0c0d0e0f"}, + {"0x0a0b0c0d0e0f10"}, + {"0x0a0b0c0d0e0f1011"}, + {"0x0a0b0c0d0e0f101112"}, + {"0x0a0b0c0d0e0f10111213"}, + {"0x0a0b0c0d0e0f1011121314"}, + {"0x0a0b0c0d0e0f101112131415"}, + {"0x0a0b0c0d0e0f10111213141516"}, + {"0x0a0b0c0d0e0f1011121314151617"}, + {"0x0a0b0c0d0e0f101112131415161718"}, + {"0x0a0b0c0d0e0f10111213141516171819"}, + {"0x0a0b0c0d0e0f101112131415161718191a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, + {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, + }, expectedz: 
"53eafb50809b3473cb4f8764f7e5d598af9eaaddc45a5a6da7cddac3380e39bb", expectedy: "40751ed98861f5c2058b4062b275f94a3d505a3221f6abe8dbe1074a4f10d0f4", expectedBlobVersionedHash: "01b78b07dbe03b960cd73ea45088b231a50ce88408fa938765e971c5dc7bbb6b", expectedBatchHash: "257175785213c68b10bb94396b657892fb7ae70708bf98ce357752906a80a6f0"}, + // single chunk blob full + {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "37ca5366d9f5ddd9471f074f8019050ea6a13097368e84f298ffa1bd806ad851", expectedy: "5aa602da97cc438a039431c799b5f97467bcd45e693273dd1215f201b19fa5bd", expectedBlobVersionedHash: "01e531e7351a271839b2ae6ddec58818efd5f426fd6a7c0bc5c33c9171ed74bf", expectedBatchHash: "d3809d6b2fd10a62c6c58f9e7c32772f4ac062a78d363f46cd3ee301e87dbad2"}, + // multiple chunks blob full + {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "250fc907e7ba3b5affb90a624566e337b02dd89a265677571cc0d1c51b60af19", expectedy: "1b2898bb001d962717159f49b015ae7228b21e9a590f836be0d79a0870c7d82b", expectedBlobVersionedHash: "01f3c431a72bbfd43c42dbd638d7f6d109be2b9449b96386b214f92b9e28ccc4", expectedBatchHash: "a51631991f6210b13e9c8ac9260704cca29fdc08adcfbd210053dc77c956e82f"}, + // max number of chunks only last one non-empty not full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "6ba09c6123b374f1828ce5b3e52c69ac7e2251f1a573ba4d51e71b386eef9c38", expectedy: "3104f9e81ecf4ade3281cc8ea68c4f451341388e2a2c84be4b5e5ed938b6bb26", expectedBlobVersionedHash: "017813036e3c57d5259d5b1d89ca0fe253e43d740f5ee287eabc916b3486f15d", expectedBatchHash: "ebfaf617cc91d9147b00968263993f70e0efc57c1189877092a87ea60b55a2d7"}, + // max number of chunks only last one non-empty full blob + {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "295f6ba39b866f6635a1e11ffe16badf42174ba120bdcb973806620370f665fc", expectedy: "553772861d517aefd58332d87d75a388523b40dbd69c1d73b7d78fd18d895513", expectedBlobVersionedHash: "013a5cb4a098dfa068b82acea202eac5c7b1ec8f16c7cb37b2a9629e7359a4b1", expectedBatchHash: "b4c58eb1be9b2b21f6a43b4170ee92d6ee0af46e20848fff508a07d40b2bac29"}, + // max number of chunks but last is empty + {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "4affa105e7c5d72a3223482b237296fead99e6d716b97bab0cb3447f93309692", expectedy: 
"4a850a8c7b84d568d8505121c92ebf284e88aa7a881290cf3939d52040871e56", expectedBlobVersionedHash: "01d3ce566fbdbcab307095bdc05de7bc2905d25f3dd4453b0f7d5f7ba8da9f08", expectedBatchHash: "ac29c2e8c26749cf99fca994cde6d33147e9e9aa60f162c964720b4937cae8fb"}, + } { + chunks := []*encoding.Chunk{} + + for _, c := range tc.chunks { + block := &encoding.Block{Transactions: []*types.TransactionData{}} + + for _, data := range c { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + + chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} + chunks = append(chunks, chunk) + } + + blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* enble encode */, true /* use mock */) + require.NoError(t, err) + actualZ := hex.EncodeToString(z[:]) + assert.Equal(t, tc.expectedz, actualZ) + assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + + _, y, err := kzg4844.ComputeProof(blob, *z) + require.NoError(t, err) + actualY := hex.EncodeToString(y[:]) + assert.Equal(t, tc.expectedy, actualY) + + // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) + dataBytes := make([]byte, 32*len(chunks)) + for i := range chunks { + copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) + } + dataHash := crypto.Keccak256Hash(dataBytes) + + batch := DABatch{ + Version: uint8(encoding.CodecV4), + BatchIndex: 6789, + L1MessagePopped: 101, + TotalL1MessagePopped: 10101, + DataHash: dataHash, + BlobVersionedHash: blobVersionedHash, + ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), + LastBlockTimestamp: 192837, + blob: blob, + z: z, + } + + batch.BlobDataProof, err = batch.blobDataProofForPICircuit() + require.NoError(t, err) + + assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) + } +} + +func TestCodecV4BatchL1MessagePopped(t *testing.T) { + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch, err := NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 0, int(batch.L1MessagePopped)) + assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 0, int(batch.L1MessagePopped)) + assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch, err = NewDABatch(originalBatch, true /* enble encode */) + assert.NoError(t, err) + assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 + assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) + + trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + 
assert.NoError(t, err) + assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) + + originalBatch.TotalL1MessagePoppedBefore = 37 + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) + + trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 + assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) + + trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") + chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 + assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) + + originalBatch.TotalL1MessagePoppedBefore = 1 + batch, err = NewDABatch(originalBatch, false /* disable encode */) + assert.NoError(t, err) + assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 + assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) + + chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 + chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 + originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, 42, int(batch.L1MessagePopped)) + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) + + originalBatch.TotalL1MessagePoppedBefore = 10 + batch, err = NewDABatch(originalBatch, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, 32, int(batch.L1MessagePopped)) + assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) +} + +func TestCodecV4ChunkAndBatchBlobSizeEstimation(t *testing.T) { + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(412), chunk2BatchBytesSize) + assert.Equal(t, uint64(238), chunk2BlobSize) + batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(412), batch2BatchBytesSize) + assert.Equal(t, uint64(238), batch2BlobSize) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), chunk3BatchBytesSize) + assert.Equal(t, uint64(2934), chunk3BlobSize) + batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, true /* enable
encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(5863), batch3BatchBytesSize) + assert.Equal(t, uint64(2934), batch3BlobSize) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk4BatchBytesSize) + assert.Equal(t, uint64(55), chunk4BlobSize) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(214), blob4BatchBytesSize) + assert.Equal(t, uint64(55), batch4BlobSize) + + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} + chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(6093), chunk5BatchBytesSize) + assert.Equal(t, uint64(3150), chunk5BlobSize) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(214), chunk6BatchBytesSize) + assert.Equal(t, uint64(55), chunk6BlobSize) + batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} + batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, true /* enable encode */) + assert.NoError(t, err) + assert.Equal(t, uint64(6125), batch5BatchBytesSize) + assert.Equal(t, uint64(3187), batch5BlobSize) +} + +func TestCodecV4ChunkAndBatchCalldataSizeEstimation(t *testing.T) { + trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") + chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} + chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) + assert.Equal(t, uint64(60), chunk2CalldataSize) + batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} + batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) + assert.Equal(t, uint64(60), batch2CalldataSize) + + trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") + chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} + chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) + assert.Equal(t, uint64(60), chunk3CalldataSize) + batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} + batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) + assert.Equal(t, uint64(60), batch3CalldataSize) + + trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") + chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) + assert.Equal(t, uint64(60), chunk4CalldataSize) + batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} + batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) + assert.Equal(t, uint64(60), batch4CalldataSize) + + chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} + chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) + assert.Equal(t, uint64(120), chunk5CalldataSize) + chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} + chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) + assert.Equal(t, uint64(60), chunk6CalldataSize) + batch5 := &encoding.Batch{Chunks: 
[]*encoding.Chunk{chunk5, chunk6}} + batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) + assert.Equal(t, uint64(180), batch5CalldataSize) +} + +func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) { + t.Run("Case 1", func(t *testing.T) { + jsonStr := `{ + "version": 4, + "batch_index": 293212, + "l1_message_popped": 7, + "total_l1_message_popped": 904750, + "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", + "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", + "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", + "last_block_timestamp": 1721130505, + "blob_data_proof": [ + "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", + "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" + ] + }` + + var batch DABatch + err := json.Unmarshal([]byte(jsonStr), &batch) + require.NoError(t, err) + + assert.Equal(t, uint8(4), batch.Version) + assert.Equal(t, uint64(293212), batch.BatchIndex) + assert.Equal(t, uint64(7), batch.L1MessagePopped) + assert.Equal(t, uint64(904750), batch.TotalL1MessagePopped) + assert.Equal(t, common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), batch.DataHash) + assert.Equal(t, common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), batch.BlobVersionedHash) + assert.Equal(t, common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), batch.ParentBatchHash) + assert.Equal(t, uint64(1721130505), batch.LastBlockTimestamp) + assert.Equal(t, common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), batch.BlobDataProof[0]) + assert.Equal(t, common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), batch.BlobDataProof[1]) + + batchHash := batch.Hash() + + expectedHash := common.HexToHash("0x64ba42153a4f642b2d8a37cf74a53067c37bba7389b85e7e07521f584e6b73d0") + assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") + + // Marshal and Unmarshal test + data, err := json.Marshal(&batch) + require.NoError(t, err) + + var decodedBatch DABatch + err = json.Unmarshal(data, &decodedBatch) + require.NoError(t, err) + + assert.Equal(t, batch, decodedBatch) + }) + + t.Run("Case 2", func(t *testing.T) { + jsonStr := `{ + "version": 5, + "batch_index": 123, + "l1_message_popped": 0, + "total_l1_message_popped": 0, + "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", + "last_block_timestamp": 1720174236, + "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", + "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", + "blob_data_proof": [ + "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", + "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" + ] + }` + + var batch DABatch + err := json.Unmarshal([]byte(jsonStr), &batch) + require.NoError(t, err) + + assert.Equal(t, uint8(5), batch.Version) + assert.Equal(t, uint64(123), batch.BatchIndex) + assert.Equal(t, uint64(0), batch.L1MessagePopped) + assert.Equal(t, uint64(0), batch.TotalL1MessagePopped) + assert.Equal(t, common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), batch.ParentBatchHash) + assert.Equal(t, uint64(1720174236), batch.LastBlockTimestamp) + assert.Equal(t, 
common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), batch.DataHash) + assert.Equal(t, common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), batch.BlobVersionedHash) + assert.Equal(t, common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), batch.BlobDataProof[0]) + assert.Equal(t, common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), batch.BlobDataProof[1]) + + batchHash := batch.Hash() + + expectedHash := common.HexToHash("0xd14f142dbc5c384e9920d5bf82c6bbf7c98030ffd7a3cace6c8a6e9639a285f9") + assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") + + // Marshal and Unmarshal test + data, err := json.Marshal(&batch) + require.NoError(t, err) + + var decodedBatch DABatch + err = json.Unmarshal(data, &decodedBatch) + require.NoError(t, err) + + assert.Equal(t, batch, decodedBatch) + }) + + t.Run("Case 3", func(t *testing.T) { + jsonStr := `{ + "version": 4, + "batch_index": 293205, + "l1_message_popped": 0, + "total_l1_message_popped": 904737, + "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", + "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", + "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", + "last_block_timestamp": 1721129563, + "blob_data_proof": [ + "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", + "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" + ] + }` + + var batch DABatch + err := json.Unmarshal([]byte(jsonStr), &batch) + require.NoError(t, err) + + assert.Equal(t, uint8(4), batch.Version) + assert.Equal(t, uint64(293205), batch.BatchIndex) + assert.Equal(t, uint64(0), batch.L1MessagePopped) + assert.Equal(t, uint64(904737), batch.TotalL1MessagePopped) + assert.Equal(t, common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), batch.ParentBatchHash) + assert.Equal(t, uint64(1721129563), batch.LastBlockTimestamp) + assert.Equal(t, common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), batch.DataHash) + assert.Equal(t, common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), batch.BlobVersionedHash) + assert.Equal(t, common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), batch.BlobDataProof[0]) + assert.Equal(t, common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), batch.BlobDataProof[1]) + + batchHash := batch.Hash() + + expectedHash := common.HexToHash("0x19638ca802926b93946fe281666205958838d46172587d150ca4c720ae244cd3") + assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") + + // Marshal and Unmarshal test + data, err := json.Marshal(&batch) + require.NoError(t, err) + + var decodedBatch DABatch + err = json.Unmarshal(data, &decodedBatch) + require.NoError(t, err) + + assert.Equal(t, batch, decodedBatch) + }) +} + +func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { + data, err := os.ReadFile(filename) + assert.NoError(t, err) + + block := &encoding.Block{} + assert.NoError(t, json.Unmarshal(data, block)) + return block +} diff --git a/encoding/da.go b/encoding/da.go index 35befdb..4e88635 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -2,12 +2,16 @@ package encoding import ( "fmt" + "math/big" "github.com/scroll-tech/go-ethereum/common" 
"github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" ) +// BLSModulus is the BLS modulus defined in EIP-4844. +var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) + // CodecVersion defines the version of encoder and decoder. type CodecVersion uint8 @@ -23,6 +27,9 @@ const ( // CodecV3 represents the version 3 of the encoder and decoder. CodecV3 + + // CodecV4 represents the version 4 of the encoder and decoder. + CodecV4 ) // Block represents an L2 block. From e4bf12e26677cc5994677257d50033e980a16a5a Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sun, 18 Aug 2024 23:40:36 +0800 Subject: [PATCH 04/46] align naming --- encoding/codecv4/codecv4.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 9d16ef2..05bbff8 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -65,7 +65,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch, enableEncoding bool) (*DABatch, error) { +func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -92,7 +92,7 @@ func NewDABatch(batch *encoding.Batch, enableEncoding bool) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncoding, false /* no mock */) + blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) if err != nil { return nil, err } @@ -130,7 +130,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncoding bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -190,7 +190,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncoding bool, useMock copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if enableEncoding { + if enableEncode { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = compressScrollBatchBytes(batchBytes) From 030349d59730d9624a5f7d7be0165740ed6ee3d9 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 19 Aug 2024 16:50:52 +0800 Subject: [PATCH 05/46] add ConvertBlobToBlobBytes utility functions --- encoding/codecv3/codecv3.go | 28 ++++++++++++++++++++++++++++ encoding/codecv4/codecv4.go | 29 +++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index bfe0d2a..d3e9570 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -231,6 +231,34 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } +// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. 
+func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { + var blobBytes [126976]byte + + for from := 0; from < len(b.blob); from += 32 { + copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) + } + + metadataLength := 2 + MaxNumChunks*4 + numChunks := binary.BigEndian.Uint16(blobBytes[:2]) + + if numChunks > MaxNumChunks { + return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) + } + + totalSize := metadataLength + for i := 0; i < int(numChunks); i++ { + chunkSize := binary.BigEndian.Uint32(blobBytes[2+4*i:]) + totalSize += int(chunkSize) + + if totalSize > len(blobBytes) { + return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) + } + } + + return blobBytes[:totalSize], nil +} + // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 05bbff8..3da14cf 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -353,6 +353,35 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } +// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. +func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { + var blobBytes [126976]byte + + for from := 0; from < len(b.blob); from += 32 { + copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) + } + + startIndex := 1 // Skip the flag byte in codecv4 + metadataLength := startIndex + 2 + MaxNumChunks*4 + numChunks := binary.BigEndian.Uint16(blobBytes[startIndex : startIndex+2]) + + if numChunks > MaxNumChunks { + return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) + } + + totalSize := metadataLength + for i := 0; i < int(numChunks); i++ { + chunkSize := binary.BigEndian.Uint32(blobBytes[startIndex+2+4*i:]) + totalSize += int(chunkSize) + + if totalSize > len(blobBytes) { + return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) + } + } + + return blobBytes[:totalSize], nil +} + // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
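// The blob size returned here accounts for the one-byte encoding flag that codecv4 prepends to the blob payload.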
func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) From ed4de9e92f76c5a6cfee00c59ff7783f4a1c5a9d Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 19 Aug 2024 17:08:15 +0800 Subject: [PATCH 06/46] kept blob bytes --- encoding/codecv2/codecv2.go | 18 +++++------ encoding/codecv2/codecv2_test.go | 2 +- encoding/codecv3/codecv3.go | 37 ++++++----------------- encoding/codecv3/codecv3_test.go | 2 +- encoding/codecv4/codecv4.go | 52 ++++++++++---------------------- encoding/codecv4/codecv4_test.go | 2 +- 6 files changed, 37 insertions(+), 76 deletions(-) diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go index 3edf328..b5ed267 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2/codecv2.go @@ -86,7 +86,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -116,7 +116,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -147,7 +147,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // encode L2 txs into blob payload rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } batchBytes = append(batchBytes, rlpTxData...) } @@ -178,7 +178,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // blobBytes represents the compressed blob payload (batchBytes) blobBytes, err := compressScrollBatchBytes(batchBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } // Only apply this check when the uncompressed batch data has exceeded 128 KiB. @@ -186,25 +186,25 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // Check compressed data compatibility. 
if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } } if len(blobBytes) > 126976 { log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } // convert raw data to BLSFieldElements blob, err := MakeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") + return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) @@ -221,7 +221,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 start := 32 - len(pointBytes) copy(z[start:], pointBytes) - return blob, blobVersionedHash, &z, nil + return blob, blobVersionedHash, &z, blobBytes, nil } // MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go index 3db2fe4..c34f608 100644 --- a/encoding/codecv2/codecv2_test.go +++ b/encoding/codecv2/codecv2_test.go @@ -674,7 +674,7 @@ func TestCodecV2BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index d3e9570..5c82d10 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -40,6 +40,9 @@ type DABatch struct { // blob payload blob *kzg4844.Blob z *kzg4844.Point + + // for batch task + blobBytes []byte } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. @@ -80,7 +83,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -99,6 +102,7 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { LastBlockTimestamp: lastBlock.Header.Time, blob: blob, z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -118,7 +122,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. 
-func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { return codecv2.ConstructBlobPayload(chunks, useMockTxData) } @@ -231,32 +235,9 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } -// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. -func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { - var blobBytes [126976]byte - - for from := 0; from < len(b.blob); from += 32 { - copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) - } - - metadataLength := 2 + MaxNumChunks*4 - numChunks := binary.BigEndian.Uint16(blobBytes[:2]) - - if numChunks > MaxNumChunks { - return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) - } - - totalSize := metadataLength - for i := 0; i < int(numChunks); i++ { - chunkSize := binary.BigEndian.Uint32(blobBytes[2+4*i:]) - totalSize += int(chunkSize) - - if totalSize > len(blobBytes) { - return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) - } - } - - return blobBytes[:totalSize], nil +// BlobBytes returns the blob bytes of the batch. +func (b *DABatch) BlobBytes() []byte { + return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go index 0b22312..fef0c12 100644 --- a/encoding/codecv3/codecv3_test.go +++ b/encoding/codecv3/codecv3_test.go @@ -668,7 +668,7 @@ func TestCodecV3BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 3da14cf..8ab046a 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -52,6 +52,9 @@ type DABatch struct { // blob payload blob *kzg4844.Blob z *kzg4844.Point + + // for batch task + blobBytes []byte } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. @@ -92,7 +95,7 @@ func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) if err != nil { return nil, err } @@ -111,6 +114,7 @@ func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { LastBlockTimestamp: lastBlock.Header.Time, blob: blob, z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -130,7 +134,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. 
-func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -161,7 +165,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx // encode L2 txs into blob payload rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } batchBytes = append(batchBytes, rlpTxData...) } @@ -195,13 +199,13 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx var err error blobBytes, err = compressScrollBatchBytes(batchBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } if !useMockTxData { // Check compressed data compatibility. if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } } blobBytes = append([]byte{1}, blobBytes...) @@ -212,19 +216,19 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx if len(blobBytes) > 126976 { log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, errors.New("Blob payload exceeds maximum size") + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } // convert raw data to BLSFieldElements blob, err := MakeBlobCanonical(blobBytes) if err != nil { - return nil, common.Hash{}, nil, err + return nil, common.Hash{}, nil, nil, err } // compute blob versioned hash c, err := kzg4844.BlobToCommitment(blob) if err != nil { - return nil, common.Hash{}, nil, errors.New("failed to create blob commitment") + return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") } blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) @@ -241,7 +245,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx start := 32 - len(pointBytes) copy(z[start:], pointBytes) - return blob, blobVersionedHash, &z, nil + return blob, blobVersionedHash, &z, blobBytes, nil } // NewDABatchFromBytes decodes the given byte slice into a DABatch. @@ -353,33 +357,9 @@ func (b *DABatch) Blob() *kzg4844.Blob { return b.blob } -// ConvertBlobToBlobBytes converts the canonical blob representation into DA blob bytes. 
-func (b *DABatch) ConvertBlobToBlobBytes() ([]byte, error) { - var blobBytes [126976]byte - - for from := 0; from < len(b.blob); from += 32 { - copy(blobBytes[from/32*31:], b.blob[from+1:from+32]) - } - - startIndex := 1 // Skip the flag byte in codecv4 - metadataLength := startIndex + 2 + MaxNumChunks*4 - numChunks := binary.BigEndian.Uint16(blobBytes[startIndex : startIndex+2]) - - if numChunks > MaxNumChunks { - return nil, fmt.Errorf("number of chunks (%d) exceeds maximum allowed chunks (%d)", numChunks, MaxNumChunks) - } - - totalSize := metadataLength - for i := 0; i < int(numChunks); i++ { - chunkSize := binary.BigEndian.Uint32(blobBytes[startIndex+2+4*i:]) - totalSize += int(chunkSize) - - if totalSize > len(blobBytes) { - return nil, fmt.Errorf("calculated total size (%d) exceeds the length of blobBytes (%d)", totalSize, len(blobBytes)) - } - } - - return blobBytes[:totalSize], nil +// BlobBytes returns the blob bytes of the batch. +func (b *DABatch) BlobBytes() []byte { + return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go index 7faf096..a824c64 100644 --- a/encoding/codecv4/codecv4_test.go +++ b/encoding/codecv4/codecv4_test.go @@ -480,7 +480,7 @@ func TestCodecV4BatchStandardTestCases(t *testing.T) { chunks = append(chunks, chunk) } - blob, blobVersionedHash, z, err := ConstructBlobPayload(chunks, true /* enable encode */, true /* use mock */) + blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* enable encode */, true /* use mock */) require.NoError(t, err) actualZ := hex.EncodeToString(z[:]) assert.Equal(t, tc.expectedz, actualZ) From c6af3bbe7068da2b356509f2cb9eaf6c8d514bdf Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 19 Aug 2024 18:09:36 +0800 Subject: [PATCH 07/46] rename enableEncode to enableCompress --- encoding/codecv4/codecv4.go | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 8ab046a..4402b6a 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -68,7 +68,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { +func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -95,7 +95,7 @@ func NewDABatch(batch *encoding.Batch, enableEncode bool) (*DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableEncode, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableCompress, false /* no mock */) if err != nil { return nil, err } @@ -134,7 +134,7 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u } // ConstructBlobPayload constructs the 4844 blob payload. 
-func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + MaxNumChunks*4 @@ -194,7 +194,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableEncode bool, useMockTx copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if enableEncode { + if enableCompress { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = compressScrollBatchBytes(batchBytes) @@ -363,13 +363,13 @@ func (b *DABatch) BlobBytes() []byte { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode bool) (uint64, uint64, error) { +func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableEncode { + if enableCompress { blobBytes, err := compressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -382,13 +382,13 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableEncode b } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableEncode bool) (uint64, uint64, error) { +func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress bool) (uint64, uint64, error) { batchBytes, err := constructBatchPayload(b.Chunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableEncode { + if enableCompress { blobBytes, err := compressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err From a5691d4eee3baa866a9d217f8d390fc8bc2efa77 Mon Sep 17 00:00:00 2001 From: colin <102356659+colinlyguo@users.noreply.github.com> Date: Tue, 20 Aug 2024 16:45:50 +0800 Subject: [PATCH 08/46] refactor: move some common functions to encoding (#24) * refactor: move some common functions to encoding * fix golint --- encoding/codecv1/codecv1.go | 87 +----------- encoding/codecv2/codecv2.go | 105 ++------------- encoding/codecv3/codecv3.go | 8 +- encoding/codecv4/codecv4.go | 125 ++---------------- encoding/da.go | 120 +++++++++++++++++ .../libscroll_zstd_darwin_arm64.a | Bin .../libscroll_zstd_darwin_arm64.go | 2 +- .../libscroll_zstd_linux_amd64.a | Bin .../libscroll_zstd_linux_amd64.go | 2 +- .../libscroll_zstd_linux_arm64.a | Bin .../libscroll_zstd_linux_arm64.go | 2 +- encoding/zstd/zstd.go | 26 ++++ 12 files changed, 182 insertions(+), 295 deletions(-) rename encoding/{codecv2 => zstd}/libscroll_zstd_darwin_arm64.a (100%) rename encoding/{codecv2 => zstd}/libscroll_zstd_darwin_arm64.go (81%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_amd64.a (100%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_amd64.go (86%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_arm64.a (100%) rename encoding/{codecv2 => zstd}/libscroll_zstd_linux_arm64.go (86%) create mode 100644 encoding/zstd/zstd.go diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go index 
205f257..4ed048b 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -8,9 +8,7 @@ import ( "fmt" "math/big" "strings" - "sync" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -260,7 +258,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[0:], hash[:]) // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, err } @@ -288,31 +286,6 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 return blob, blobVersionedHash, &z, nil } -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - // blob contains 131072 bytes but we can only utilize 31/32 of these - if len(blobBytes) > 126976 { - return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), 126976) - } - - // the canonical (padded) blob payload - var blob kzg4844.Blob - - // encode blob payload by prepending every 31 bytes with 1 zero byte - index := 0 - - for from := 0; from < len(blobBytes); from += 31 { - to := from + 31 - if to > len(blobBytes) { - to = len(blobBytes) - } - copy(blob[index+1:], blobBytes[from:to]) - index += 32 - } - - return &blob, nil -} - // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. func NewDABatchFromBytes(data []byte) (*DABatch, error) { @@ -379,7 +352,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -398,7 +371,7 @@ func EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) { if err != nil { return 0, err } - return CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil } // EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. @@ -412,7 +385,7 @@ func EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) { } batchDataSize += chunkDataSize } - return CalculatePaddedBlobSize(metadataSize + batchDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + batchDataSize), nil } func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { @@ -550,55 +523,3 @@ func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { } return totalL1CommitCalldataSize } - -// CalculatePaddedBlobSize calculates the required size on blob storage -// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. 
-func CalculatePaddedBlobSize(dataSize uint64) uint64 { - paddedSize := (dataSize / 31) * 32 - - if dataSize%31 != 0 { - paddedSize += 1 + dataSize%31 // Add 1 byte for the first empty byte plus the remainder bytes - } - - return paddedSize -} - -var ( - blobDataProofArgs *abi.Arguments - initBlobDataProofArgsOnce sync.Once -) - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. -func GetBlobDataProofArgs() (*abi.Arguments, error) { - var initError error - - initBlobDataProofArgsOnce.Do(func() { - // Initialize bytes32 type - bytes32Type, err := abi.NewType("bytes32", "bytes32", nil) - if err != nil { - initError = fmt.Errorf("failed to initialize abi type bytes32: %w", err) - return - } - - // Initialize bytes48 type - bytes48Type, err := abi.NewType("bytes48", "bytes48", nil) - if err != nil { - initError = fmt.Errorf("failed to initialize abi type bytes48: %w", err) - return - } - - // Successfully create the argument list - blobDataProofArgs = &abi.Arguments{ - {Type: bytes32Type, Name: "z"}, - {Type: bytes32Type, Name: "y"}, - {Type: bytes48Type, Name: "kzg_commitment"}, - {Type: bytes48Type, Name: "kzg_proof"}, - } - }) - - if initError != nil { - return nil, initError - } - - return blobDataProofArgs, nil -} diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2/codecv2.go index b5ed267..7588394 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2/codecv2.go @@ -1,11 +1,5 @@ package codecv2 -/* -#include -char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); -*/ -import "C" - import ( "crypto/sha256" "encoding/binary" @@ -13,9 +7,7 @@ import ( "errors" "fmt" "math/big" - "unsafe" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -24,6 +16,7 @@ import ( "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/codecv1" + "github.com/scroll-tech/da-codec/encoding/zstd" ) // MaxNumChunks is the maximum number of chunks that a batch can contain. @@ -176,7 +169,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[0:], hash[:]) // blobBytes represents the compressed blob payload (batchBytes) - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -196,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -224,11 +217,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 return blob, blobVersionedHash, &z, blobBytes, nil } -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - return codecv1.MakeBlobCanonical(blobBytes) -} - // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. 
func NewDABatchFromBytes(data []byte) (*DABatch, error) { @@ -295,7 +283,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -309,38 +297,38 @@ func (b *DABatch) Blob() *kzg4844.Blob { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return 0, 0, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return 0, 0, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -358,11 +346,11 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. 
func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -401,68 +389,3 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { return codecv1.EstimateBatchL1CommitGas(b) } - -// constructBatchPayload constructs the batch payload. -// This function is only used in compressed batch payload length estimation. -func constructBatchPayload(chunks []*encoding.Chunk) ([]byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // batch metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode batch metadata and L2 transactions, - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into batch payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) - if err != nil { - return nil, err - } - batchBytes = append(batchBytes, rlpTxData...) - } - } - - // batch metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - } - return batchBytes, nil -} - -// compressScrollBatchBytes compresses the given batch of bytes. -// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. -func compressScrollBatchBytes(batchBytes []byte) ([]byte, error) { - srcSize := C.uint64_t(len(batchBytes)) - outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes - outbuf := make([]byte, outbufSize) - - if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, - (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { - return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) - } - - return outbuf[:int(outbufSize)], nil -} - -// CalculatePaddedBlobSize calculates the required size on blob storage -// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. -func CalculatePaddedBlobSize(dataSize uint64) uint64 { - return codecv1.CalculatePaddedBlobSize(dataSize) -} - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. 
-func GetBlobDataProofArgs() (*abi.Arguments, error) { - return codecv1.GetBlobDataProofArgs() -} diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go index 5c82d10..0a85efa 100644 --- a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3/codecv3.go @@ -6,7 +6,6 @@ import ( "errors" "fmt" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -223,7 +222,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -279,8 +278,3 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { return codecv2.EstimateBatchL1CommitGas(b) + 50000 // plus 50000 for the point-evaluation precompile call. } - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. -func GetBlobDataProofArgs() (*abi.Arguments, error) { - return codecv2.GetBlobDataProofArgs() -} diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4/codecv4.go index 4402b6a..b07e2be 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -1,11 +1,5 @@ package codecv4 -/* -#include -char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); -*/ -import "C" - import ( "crypto/sha256" "encoding/binary" @@ -13,9 +7,7 @@ import ( "errors" "fmt" "math/big" - "unsafe" - "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -23,8 +15,8 @@ import ( "github.com/scroll-tech/go-ethereum/log" "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv1" "github.com/scroll-tech/da-codec/encoding/codecv3" + "github.com/scroll-tech/da-codec/encoding/zstd" ) // MaxNumChunks is the maximum number of chunks that a batch can contain. @@ -197,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock if enableCompress { // blobBytes represents the compressed blob payload (batchBytes) var err error - blobBytes, err = compressScrollBatchBytes(batchBytes) + blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -210,7 +202,6 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } blobBytes = append([]byte{1}, blobBytes...) } else { - blobBytes = batchBytes blobBytes = append([]byte{0}, batchBytes...) 
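// the first byte of blobBytes is the encoding flag: 1 marks a zstd-compressed payload, 0 marks raw batchBytes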
} @@ -220,7 +211,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -345,7 +336,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() + blobDataProofArgs, err := encoding.GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -364,13 +355,13 @@ func (b *DABatch) BlobBytes() []byte { // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 if enableCompress { - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } @@ -378,18 +369,18 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 if enableCompress { - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err } @@ -397,16 +388,16 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := constructBatchPayload([]*encoding.Chunk{c}) + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -419,11 +410,11 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
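The one-byte envelope written by ConstructBlobPayload above is also why both estimators count 1 plus the batch or compressed length before padding: the flag byte consumes blob capacity in either branch. As a reading aid, a minimal sketch of the consumer side of this convention, assuming a hypothetical splitBlobEnvelope helper and that the canonical blob packing has already been undone:

    package main

    import "fmt"

    // splitBlobEnvelope separates the leading compression flag from the payload.
    // Sketch of the envelope convention only, not the actual decoding path.
    func splitBlobEnvelope(blobBytes []byte) (compressed bool, payload []byte, err error) {
        if len(blobBytes) == 0 {
            return false, nil, fmt.Errorf("empty blob payload")
        }
        switch blobBytes[0] {
        case 1:
            return true, blobBytes[1:], nil // zstd-compressed batch bytes follow
        case 0:
            return false, blobBytes[1:], nil // raw batch bytes follow
        default:
            return false, nil, fmt.Errorf("unknown envelope flag %d", blobBytes[0])
        }
    }

    func main() {
        compressed, payload, err := splitBlobEnvelope([]byte{0, 0xde, 0xad})
        fmt.Println(compressed, len(payload), err) // false 2 <nil>
    }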
func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := constructBatchPayload(b.Chunks) + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) if err != nil { return false, err } - blobBytes, err := compressScrollBatchBytes(batchBytes) + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return false, err } @@ -453,91 +444,3 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { return codecv3.EstimateBatchL1CommitGas(b) } - -// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. -func GetBlobDataProofArgs() (*abi.Arguments, error) { - return codecv3.GetBlobDataProofArgs() -} - -// checkBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func checkBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := constructBatchPayload(b.Chunks) - if err != nil { - return false, err - } - blobBytes, err := compressScrollBatchBytes(batchBytes) - if err != nil { - return false, err - } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return false, nil - } - return true, nil -} - -// constructBatchPayload constructs the batch payload. -// This function is only used in compressed batch payload length estimation. -func constructBatchPayload(chunks []*encoding.Chunk) ([]byte, error) { - // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 - - // batchBytes represents the raw (un-compressed and un-padded) blob payload - batchBytes := make([]byte, metadataLength) - - // batch metadata: num_chunks - binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) - - // encode batch metadata and L2 transactions, - for chunkID, chunk := range chunks { - currentChunkStartIndex := len(batchBytes) - - for _, block := range chunk.Blocks { - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { - continue - } - - // encode L2 txs into batch payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) - if err != nil { - return nil, err - } - batchBytes = append(batchBytes, rlpTxData...) - } - } - - // batch metadata: chunki_size - if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 { - binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize)) - } - } - return batchBytes, nil -} - -// compressScrollBatchBytes compresses the given batch of bytes. -// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message. 
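After this change every per-version compatibility wrapper reduces to the same three shared calls. Written once against the shared packages (a sketch, with maxNumChunks standing in for the per-version constant each caller supplies):

    package compat

    import (
        "github.com/scroll-tech/da-codec/encoding"
        "github.com/scroll-tech/da-codec/encoding/zstd"
    )

    // checkCompatibility builds the raw blob payload, compresses it, and
    // validates the compressed stream, mirroring the wrappers above.
    func checkCompatibility(chunks []*encoding.Chunk, maxNumChunks uint64) (bool, error) {
        batchBytes, err := encoding.ConstructBatchPayloadInBlob(chunks, maxNumChunks)
        if err != nil {
            return false, err
        }
        blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
        if err != nil {
            return false, err
        }
        if err := encoding.CheckCompressedDataCompatibility(blobBytes); err != nil {
            return false, nil // incompatible data is a negative result, not a failure
        }
        return true, nil
    }

The incompatible case deliberately returns (false, nil) rather than an error, matching the wrappers, so callers can fall back to uncompressed encoding instead of aborting.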
-func compressScrollBatchBytes(batchBytes []byte) ([]byte, error) { - srcSize := C.uint64_t(len(batchBytes)) - outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes - outbuf := make([]byte, outbufSize) - - if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, - (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { - return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) - } - - return outbuf[:int(outbufSize)], nil -} - -// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. -func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { - return codecv1.MakeBlobCanonical(blobBytes) -} - -// CalculatePaddedBlobSize calculates the required size on blob storage -// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. -func CalculatePaddedBlobSize(dataSize uint64) uint64 { - return codecv1.CalculatePaddedBlobSize(dataSize) -} diff --git a/encoding/da.go b/encoding/da.go index 4e88635..b085351 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -1,12 +1,16 @@ package encoding import ( + "encoding/binary" "fmt" "math/big" + "sync" + "github.com/scroll-tech/go-ethereum/accounts/abi" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) // BLSModulus is the BLS modulus defined in EIP-4844. @@ -326,3 +330,119 @@ func CheckCompressedDataCompatibility(data []byte) error { return nil } + +// MakeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements. +func MakeBlobCanonical(blobBytes []byte) (*kzg4844.Blob, error) { + // blob contains 131072 bytes but we can only utilize 31/32 of these + if len(blobBytes) > 126976 { + return nil, fmt.Errorf("oversized batch payload, blob bytes length: %v, max length: %v", len(blobBytes), 126976) + } + + // the canonical (padded) blob payload + var blob kzg4844.Blob + + // encode blob payload by prepending every 31 bytes with 1 zero byte + index := 0 + + for from := 0; from < len(blobBytes); from += 31 { + to := from + 31 + if to > len(blobBytes) { + to = len(blobBytes) + } + copy(blob[index+1:], blobBytes[from:to]) + index += 32 + } + + return &blob, nil +} + +var ( + blobDataProofArgs *abi.Arguments + initBlobDataProofArgsOnce sync.Once +) + +// GetBlobDataProofArgs gets the blob data proof arguments for batch commitment and returns error if initialization fails. 
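MakeBlobCanonical, now shared in the encoding package, spreads the payload across field elements by writing at most 31 payload bytes after a leading zero byte in each 32-byte word, which keeps every word below the BLS modulus. The inverse transform is a useful mental model for what consumers must undo (an illustrative sketch; the payload length must be known out of band, since the blob itself carries no length field):

    package blobutil

    import "github.com/scroll-tech/go-ethereum/crypto/kzg4844"

    // blobToPayload undoes the 31-in-32 packing performed by MakeBlobCanonical.
    // payloadLen must be tracked by the caller.
    func blobToPayload(blob *kzg4844.Blob, payloadLen int) []byte {
        payload := make([]byte, 0, payloadLen)
        for from := 0; from < len(blob) && len(payload) < payloadLen; from += 32 {
            take := 31
            if remaining := payloadLen - len(payload); remaining < take {
                take = remaining
            }
            // blob[from] is the zero padding byte; the next 31 bytes carry data
            payload = append(payload, blob[from+1:from+1+take]...)
        }
        return payload
    }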
+func GetBlobDataProofArgs() (*abi.Arguments, error) { + var initError error + + initBlobDataProofArgsOnce.Do(func() { + // Initialize bytes32 type + bytes32Type, err := abi.NewType("bytes32", "bytes32", nil) + if err != nil { + initError = fmt.Errorf("failed to initialize abi type bytes32: %w", err) + return + } + + // Initialize bytes48 type + bytes48Type, err := abi.NewType("bytes48", "bytes48", nil) + if err != nil { + initError = fmt.Errorf("failed to initialize abi type bytes48: %w", err) + return + } + + // Successfully create the argument list + blobDataProofArgs = &abi.Arguments{ + {Type: bytes32Type, Name: "z"}, + {Type: bytes32Type, Name: "y"}, + {Type: bytes48Type, Name: "kzg_commitment"}, + {Type: bytes48Type, Name: "kzg_proof"}, + } + }) + + if initError != nil { + return nil, initError + } + + return blobDataProofArgs, nil +} + +// CalculatePaddedBlobSize calculates the required size on blob storage +// where every 32 bytes can store only 31 bytes of actual data, with the first byte being zero. +func CalculatePaddedBlobSize(dataSize uint64) uint64 { + paddedSize := (dataSize / 31) * 32 + + if dataSize%31 != 0 { + paddedSize += 1 + dataSize%31 // Add 1 byte for the first empty byte plus the remainder bytes + } + + return paddedSize +} + +// ConstructBatchPayloadInBlob constructs the batch payload. +// This function is only used in compressed batch payload length estimation. +func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, error) { + // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) + metadataLength := 2 + MaxNumChunks*4 + + // batchBytes represents the raw (un-compressed and un-padded) blob payload + batchBytes := make([]byte, metadataLength) + + // batch metadata: num_chunks + binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks))) + + // encode batch metadata and L2 transactions, + for chunkID, chunk := range chunks { + currentChunkStartIndex := len(batchBytes) + + for _, block := range chunk.Blocks { + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + // encode L2 txs into batch payload + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) + if err != nil { + return nil, err + } + batchBytes = append(batchBytes, rlpTxData...) 
+			}
+		}
+
+		// batch metadata: chunki_size
+		if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 {
+			binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize))
+		}
+	}
+	return batchBytes, nil
+}
diff --git a/encoding/codecv2/libscroll_zstd_darwin_arm64.a b/encoding/zstd/libscroll_zstd_darwin_arm64.a
similarity index 100%
rename from encoding/codecv2/libscroll_zstd_darwin_arm64.a
rename to encoding/zstd/libscroll_zstd_darwin_arm64.a
diff --git a/encoding/codecv2/libscroll_zstd_darwin_arm64.go b/encoding/zstd/libscroll_zstd_darwin_arm64.go
similarity index 81%
rename from encoding/codecv2/libscroll_zstd_darwin_arm64.go
rename to encoding/zstd/libscroll_zstd_darwin_arm64.go
index 8ace74c..d83ec17 100644
--- a/encoding/codecv2/libscroll_zstd_darwin_arm64.go
+++ b/encoding/zstd/libscroll_zstd_darwin_arm64.go
@@ -1,4 +1,4 @@
-package codecv2
+package zstd
 
 /*
 #cgo LDFLAGS: ${SRCDIR}/libscroll_zstd_darwin_arm64.a
diff --git a/encoding/codecv2/libscroll_zstd_linux_amd64.a b/encoding/zstd/libscroll_zstd_linux_amd64.a
similarity index 100%
rename from encoding/codecv2/libscroll_zstd_linux_amd64.a
rename to encoding/zstd/libscroll_zstd_linux_amd64.a
diff --git a/encoding/codecv2/libscroll_zstd_linux_amd64.go b/encoding/zstd/libscroll_zstd_linux_amd64.go
similarity index 86%
rename from encoding/codecv2/libscroll_zstd_linux_amd64.go
rename to encoding/zstd/libscroll_zstd_linux_amd64.go
index 0b22575..f1a686e 100644
--- a/encoding/codecv2/libscroll_zstd_linux_amd64.go
+++ b/encoding/zstd/libscroll_zstd_linux_amd64.go
@@ -1,7 +1,7 @@
 //go:build !musl
 // +build !musl
 
-package codecv2
+package zstd
 
 /*
 #cgo LDFLAGS: ${SRCDIR}/libscroll_zstd_linux_amd64.a
diff --git a/encoding/codecv2/libscroll_zstd_linux_arm64.a b/encoding/zstd/libscroll_zstd_linux_arm64.a
similarity index 100%
rename from encoding/codecv2/libscroll_zstd_linux_arm64.a
rename to encoding/zstd/libscroll_zstd_linux_arm64.a
diff --git a/encoding/codecv2/libscroll_zstd_linux_arm64.go b/encoding/zstd/libscroll_zstd_linux_arm64.go
similarity index 86%
rename from encoding/codecv2/libscroll_zstd_linux_arm64.go
rename to encoding/zstd/libscroll_zstd_linux_arm64.go
index ebf3943..f3775d2 100644
--- a/encoding/codecv2/libscroll_zstd_linux_arm64.go
+++ b/encoding/zstd/libscroll_zstd_linux_arm64.go
@@ -1,7 +1,7 @@
 //go:build !musl
 // +build !musl
 
-package codecv2
+package zstd
 
 /*
 #cgo LDFLAGS: ${SRCDIR}/libscroll_zstd_linux_arm64.a
diff --git a/encoding/zstd/zstd.go b/encoding/zstd/zstd.go
new file mode 100644
index 0000000..58eab2b
--- /dev/null
+++ b/encoding/zstd/zstd.go
@@ -0,0 +1,26 @@
+package zstd
+
+/*
+#include <stdint.h>
+char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size);
+*/
+import "C"
+import (
+	"fmt"
+	"unsafe"
+)
+
+// CompressScrollBatchBytes compresses the given batch of bytes.
+// The output buffer is allocated with an extra 128 bytes to accommodate metadata overhead or error message.
+func CompressScrollBatchBytes(batchBytes []byte) ([]byte, error) { + srcSize := C.uint64_t(len(batchBytes)) + outbufSize := C.uint64_t(len(batchBytes) + 128) // Allocate output buffer with extra 128 bytes + outbuf := make([]byte, outbufSize) + + if err := C.compress_scroll_batch_bytes((*C.uchar)(unsafe.Pointer(&batchBytes[0])), srcSize, + (*C.uchar)(unsafe.Pointer(&outbuf[0])), &outbufSize); err != nil { + return nil, fmt.Errorf("failed to compress scroll batch bytes: %s", C.GoString(err)) + } + + return outbuf[:int(outbufSize)], nil +} From 9532963eb5c251a94a29bc1c35bcf8567b8fc57e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 21:06:39 +0800 Subject: [PATCH 09/46] move symbol replace script to zstd folder --- .../add_scroll_prefix_in_zstd_related_symbols.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename encoding/{codecv2 => zstd}/add_scroll_prefix_in_zstd_related_symbols.sh (100%) diff --git a/encoding/codecv2/add_scroll_prefix_in_zstd_related_symbols.sh b/encoding/zstd/add_scroll_prefix_in_zstd_related_symbols.sh similarity index 100% rename from encoding/codecv2/add_scroll_prefix_in_zstd_related_symbols.sh rename to encoding/zstd/add_scroll_prefix_in_zstd_related_symbols.sh From 990bdb38b88ed3bf176182b269562c319a4e08f2 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 22:20:30 +0800 Subject: [PATCH 10/46] refactor: move some util functions to public package --- encoding/codecv0/codecv0.go | 53 +++++++++++++------------------------ encoding/codecv1/codecv1.go | 45 +++++++++++-------------------- encoding/da.go | 15 +++++++++++ 3 files changed, 49 insertions(+), 64 deletions(-) diff --git a/encoding/codecv0/codecv0.go b/encoding/codecv0/codecv0.go index f757a93..18b0b51 100644 --- a/encoding/codecv0/codecv0.go +++ b/encoding/codecv0/codecv0.go @@ -302,21 +302,6 @@ func (b *DABatch) Hash() common.Hash { return crypto.Keccak256Hash(bytes) } -// CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. -const CalldataNonZeroByteGas = 16 - -// GetKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. -func GetKeccak256Gas(size uint64) uint64 { - return GetMemoryExpansionCost(size) + 30 + 6*((size+31)/32) -} - -// GetMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. -func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { - memorySizeWord := (memoryByteSize + 31) / 32 - memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) - return memoryCost -} - // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. 
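One sharp edge of this cgo entry point: it takes the address of the first input byte, so an empty slice would panic in Go before reaching the C side. In practice ConstructBatchPayloadInBlob always returns at least the metadata section (2 + MaxNumChunks*4 bytes), but a defensive wrapper costs little. A hypothetical guard, not part of the package:

    package compat

    import (
        "errors"

        "github.com/scroll-tech/da-codec/encoding/zstd"
    )

    // safeCompress rejects empty input before zstd.CompressScrollBatchBytes
    // evaluates &batchBytes[0].
    func safeCompress(batchBytes []byte) ([]byte, error) {
        if len(batchBytes) == 0 {
            return nil, errors.New("empty batch payload")
        }
        return zstd.CompressScrollBatchBytes(batchBytes)
    }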
func EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { var size uint64 @@ -349,13 +334,13 @@ func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { if err != nil { return 0, err } - total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero - total += CalldataNonZeroByteGas * 4 // 4 bytes payload length - total += GetKeccak256Gas(txPayloadLength) // l2 tx hash + total += encoding.CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero + total += encoding.CalldataNonZeroByteGas * 4 // 4 bytes payload length + total += encoding.GetKeccak256Gas(txPayloadLength) // l2 tx hash } // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 + total += encoding.CalldataNonZeroByteGas * 60 // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -400,11 +385,11 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += encoding.CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil } @@ -413,22 +398,22 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) 
totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -442,14 +427,14 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) totalL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) } return totalL1CommitGas, nil diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go index 4ed048b..1675c76 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -406,21 +406,6 @@ func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { return dataSize, nil } -// CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. -const CalldataNonZeroByteGas = 16 - -// GetKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. -func GetKeccak256Gas(size uint64) uint64 { - return GetMemoryExpansionCost(size) + 30 + 6*((size+31)/32) -} - -// GetMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. -func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { - memorySizeWord := (memoryByteSize + 31) / 32 - memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) - return memoryCost -} - // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. 
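With GetKeccak256Gas and GetMemoryExpansionCost now shared (they are added to encoding/da.go at the end of this patch), the header-hash charge is easy to verify by hand. A worked example for the 121-byte preimage used above, i.e. the 89-byte constant header plus one 32-byte skippedL1MessageBitmap word:

    package main

    import "fmt"

    func main() {
        size := uint64(89 + 32)              // parent batch header preimage
        words := (size + 31) / 32            // 4 words
        memCost := words*words/512 + 3*words // 16/512 = 0, so 12 gas
        keccakGas := memCost + 30 + 6*words  // 12 + 30 + 24 = 66 gas
        fmt.Println(memCost, keccakGas)      // prints: 12 66
    }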
func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { var total uint64 @@ -433,7 +418,7 @@ func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { } // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 + total += encoding.CalldataNonZeroByteGas * 60 // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -467,10 +452,10 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas } @@ -479,22 +464,22 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -505,11 +490,11 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) totalL1CommitCalldataSize := EstimateChunkL1CommitCalldataSize(chunk) - 
totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) } return totalL1CommitGas diff --git a/encoding/da.go b/encoding/da.go index b085351..eb66b7c 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -16,6 +16,9 @@ import ( // BLSModulus is the BLS modulus defined in EIP-4844. var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001")) +// CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. +const CalldataNonZeroByteGas = 16 + // CodecVersion defines the version of encoder and decoder. type CodecVersion uint8 @@ -446,3 +449,15 @@ func ConstructBatchPayloadInBlob(chunks []*Chunk, MaxNumChunks uint64) ([]byte, } return batchBytes, nil } + +// GetKeccak256Gas calculates the gas cost for computing the keccak256 hash of a given size. +func GetKeccak256Gas(size uint64) uint64 { + return GetMemoryExpansionCost(size) + 30 + 6*((size+31)/32) +} + +// GetMemoryExpansionCost calculates the cost of memory expansion for a given memoryByteSize. +func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { + memorySizeWord := (memoryByteSize + 31) / 32 + memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) + return memoryCost +} From 6b868668c4b01175cd16e2b83ef07b968835f2b5 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 22:24:41 +0800 Subject: [PATCH 11/46] fix CI --- encoding/codecv0/codecv0.go | 10 +++++----- encoding/codecv1/codecv1.go | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/encoding/codecv0/codecv0.go b/encoding/codecv0/codecv0.go index 18b0b51..0eee94b 100644 --- a/encoding/codecv0/codecv0.go +++ b/encoding/codecv0/codecv0.go @@ -349,11 +349,11 @@ func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1/codecv1.go index 1675c76..154bb26 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -427,11 +427,11 @@ func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages 
// read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total } From 3ad692a507444d6380d92baca214371ccd37d042 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 22:36:03 +0800 Subject: [PATCH 12/46] add interfaces of codec --- encoding/dacodec/dacodec.go | 48 +++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 encoding/dacodec/dacodec.go diff --git a/encoding/dacodec/dacodec.go b/encoding/dacodec/dacodec.go new file mode 100644 index 0000000..e210092 --- /dev/null +++ b/encoding/dacodec/dacodec.go @@ -0,0 +1,48 @@ +package dacodec + +import ( + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// DABlock represents a Data Availability Block. +type DABlock interface { + Encode() []byte + Decode([]byte) error +} + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunk interface { + Encode() []byte + Hash() (common.Hash, error) +} + +// DABatch contains metadata about a batch of DAChunks. +type DABatch interface { + Encode() []byte + Hash() common.Hash + BlobDataProofForPointEvaluation() ([]byte, error) + Blob() *kzg4844.Blob + BlobBytes() []byte +} + +// Codec represents the interface for encoding and decoding DA-related structures. +type Codec interface { + NewDABlock(*encoding.Block, uint64) (DABlock, error) + NewDAChunk(*encoding.Chunk, uint64) (DAChunk, error) + NewDABatch(*encoding.Batch) (DABatch, error) + NewDABatchFromBytes([]byte) (DABatch, error) + + ComputeBatchDataHash([]*encoding.Chunk, uint64) (common.Hash, error) + ConstructBlobPayload([]*encoding.Chunk, bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) + + EstimateChunkL1CommitBatchSizeAndBlobSize(*encoding.Chunk) (uint64, uint64, error) + EstimateBatchL1CommitBatchSizeAndBlobSize(*encoding.Batch) (uint64, uint64, error) + CheckChunkCompressedDataCompatibility(*encoding.Chunk) (bool, error) + CheckBatchCompressedDataCompatibility(*encoding.Batch) (bool, error) + EstimateChunkL1CommitCalldataSize(*encoding.Chunk) uint64 + EstimateChunkL1CommitGas(*encoding.Chunk) uint64 + EstimateBatchL1CommitGas(*encoding.Batch) uint64 + EstimateBatchL1CommitCalldataSize(*encoding.Batch) uint64 +} From a5c6430f1cfa1c62e72e87e844b5485a4f8e1680 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 23:00:46 +0800 Subject: [PATCH 13/46] add SetCompression --- encoding/dacodec/dacodec.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/encoding/dacodec/dacodec.go b/encoding/dacodec/dacodec.go index e210092..efafc44 100644 --- a/encoding/dacodec/dacodec.go +++ b/encoding/dacodec/dacodec.go @@ -45,4 +45,6 @@ type Codec interface { EstimateChunkL1CommitGas(*encoding.Chunk) uint64 EstimateBatchL1CommitGas(*encoding.Batch) uint64 EstimateBatchL1CommitCalldataSize(*encoding.Batch) uint64 + + SetCompression(enable bool) // only used for codecv4 } From 43f56e604d17c1a92c8ad7b52bbeb59d3b352e78 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 20 Aug 2024 23:42:23 +0800 Subject: [PATCH 14/46] move interface to encoding --- encoding/dacodec/dacodec.go | 50 ------------------------------------- encoding/encoding.go | 49 ++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 50 deletions(-) delete mode 100644 encoding/dacodec/dacodec.go create mode 100644 encoding/encoding.go diff 
--git a/encoding/dacodec/dacodec.go b/encoding/dacodec/dacodec.go deleted file mode 100644 index efafc44..0000000 --- a/encoding/dacodec/dacodec.go +++ /dev/null @@ -1,50 +0,0 @@ -package dacodec - -import ( - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" -) - -// DABlock represents a Data Availability Block. -type DABlock interface { - Encode() []byte - Decode([]byte) error -} - -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk interface { - Encode() []byte - Hash() (common.Hash, error) -} - -// DABatch contains metadata about a batch of DAChunks. -type DABatch interface { - Encode() []byte - Hash() common.Hash - BlobDataProofForPointEvaluation() ([]byte, error) - Blob() *kzg4844.Blob - BlobBytes() []byte -} - -// Codec represents the interface for encoding and decoding DA-related structures. -type Codec interface { - NewDABlock(*encoding.Block, uint64) (DABlock, error) - NewDAChunk(*encoding.Chunk, uint64) (DAChunk, error) - NewDABatch(*encoding.Batch) (DABatch, error) - NewDABatchFromBytes([]byte) (DABatch, error) - - ComputeBatchDataHash([]*encoding.Chunk, uint64) (common.Hash, error) - ConstructBlobPayload([]*encoding.Chunk, bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) - - EstimateChunkL1CommitBatchSizeAndBlobSize(*encoding.Chunk) (uint64, uint64, error) - EstimateBatchL1CommitBatchSizeAndBlobSize(*encoding.Batch) (uint64, uint64, error) - CheckChunkCompressedDataCompatibility(*encoding.Chunk) (bool, error) - CheckBatchCompressedDataCompatibility(*encoding.Batch) (bool, error) - EstimateChunkL1CommitCalldataSize(*encoding.Chunk) uint64 - EstimateChunkL1CommitGas(*encoding.Chunk) uint64 - EstimateBatchL1CommitGas(*encoding.Batch) uint64 - EstimateBatchL1CommitCalldataSize(*encoding.Batch) uint64 - - SetCompression(enable bool) // only used for codecv4 -} diff --git a/encoding/encoding.go b/encoding/encoding.go new file mode 100644 index 0000000..db3b027 --- /dev/null +++ b/encoding/encoding.go @@ -0,0 +1,49 @@ +package encoding + +import ( + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// DABlock represents a Data Availability Block. +type DABlock interface { + Encode() []byte + Decode([]byte) error +} + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunk interface { + Encode() []byte + Hash() (common.Hash, error) +} + +// DABatch contains metadata about a batch of DAChunks. +type DABatch interface { + Encode() []byte + Hash() common.Hash + BlobDataProofForPointEvaluation() ([]byte, error) + Blob() *kzg4844.Blob + BlobBytes() []byte +} + +// Codec represents the interface for encoding and decoding DA-related structures. 
+type Codec interface { + NewDABlock(*Block, uint64) (DABlock, error) + NewDAChunk(*Chunk, uint64) (DAChunk, error) + NewDABatch(*Batch) (DABatch, error) + NewDABatchFromBytes([]byte) (DABatch, error) + + ComputeBatchDataHash([]*Chunk, uint64) (common.Hash, error) + ConstructBlobPayload([]*Chunk, bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) + + EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) + EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) + CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) + CheckBatchCompressedDataCompatibility(*Batch) (bool, error) + EstimateChunkL1CommitCalldataSize(*Chunk) uint64 + EstimateChunkL1CommitGas(*Chunk) uint64 + EstimateBatchL1CommitGas(*Batch) uint64 + EstimateBatchL1CommitCalldataSize(*Batch) uint64 + + SetCompression(enable bool) // only used for codecv4 +} From cd280de54d4aeaad334693b73ef047da45646f10 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 03:30:44 +0800 Subject: [PATCH 15/46] refactor --- encoding/{codecv0 => }/codecv0.go | 183 +++-- encoding/codecv0/codecv0_test.go | 597 ---------------- encoding/{codecv1 => }/codecv1.go | 239 ++++--- encoding/codecv1/codecv1_test.go | 892 ----------------------- encoding/{codecv2 => }/codecv2.go | 137 ++-- encoding/codecv2/codecv2_test.go | 967 ------------------------- encoding/{codecv3 => }/codecv3.go | 120 ++-- encoding/codecv3/codecv3_test.go | 1098 ----------------------------- encoding/{codecv4 => }/codecv4.go | 155 ++-- encoding/codecv4/codecv4_test.go | 837 ---------------------- encoding/da.go | 28 +- encoding/encoding.go | 44 +- encoding/zstd/zstd.go | 1 + 13 files changed, 525 insertions(+), 4773 deletions(-) rename encoding/{codecv0 => }/codecv0.go (63%) delete mode 100644 encoding/codecv0/codecv0_test.go rename encoding/{codecv1 => }/codecv1.go (59%) delete mode 100644 encoding/codecv1/codecv1_test.go rename encoding/{codecv2 => }/codecv2.go (70%) delete mode 100644 encoding/codecv2/codecv2_test.go rename encoding/{codecv3 => }/codecv3.go (63%) delete mode 100644 encoding/codecv3/codecv3_test.go rename encoding/{codecv4 => }/codecv4.go (70%) delete mode 100644 encoding/codecv4/codecv4_test.go diff --git a/encoding/codecv0/codecv0.go b/encoding/codecv0.go similarity index 63% rename from encoding/codecv0/codecv0.go rename to encoding/codecv0.go index 0eee94b..9a92879 100644 --- a/encoding/codecv0/codecv0.go +++ b/encoding/codecv0.go @@ -1,4 +1,4 @@ -package codecv0 +package encoding import ( "encoding/binary" @@ -12,12 +12,13 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" - - "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) +type DACodecV0 struct{} + // DABlock represents a Data Availability Block. -type DABlock struct { +type DABlockV0 struct { BlockNumber uint64 Timestamp uint64 BaseFee *big.Int @@ -27,13 +28,13 @@ type DABlock struct { } // DAChunk groups consecutive DABlocks with their transactions. -type DAChunk struct { - Blocks []*DABlock +type DAChunkV0 struct { + Blocks []*DABlockV0 Transactions [][]*types.TransactionData } // DABatch contains metadata about a batch of DAChunks. 
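The point of the interface is that downstream batch-building code no longer needs per-version imports; it can hold a single encoding.Codec for the active fork. A sketch of that consumption pattern (hypothetical helper, not part of this change):

    package relayer

    import "github.com/scroll-tech/da-codec/encoding"

    // commitEstimates is version-agnostic: the same call site serves v0's
    // calldata-only batches and the blob-carrying versions behind the interface.
    func commitEstimates(codec encoding.Codec, batch *encoding.Batch) (calldataSize, gas uint64) {
        return codec.EstimateBatchL1CommitCalldataSize(batch), codec.EstimateBatchL1CommitGas(batch)
    }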
-type DABatch struct { +type DABatchV0 struct { Version uint8 BatchIndex uint64 L1MessagePopped uint64 @@ -43,8 +44,8 @@ type DABatch struct { SkippedL1MessageBitmap []byte } -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -62,7 +63,7 @@ func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABl return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := DABlock{ + daBlock := DABlockV0{ BlockNumber: block.Header.Number.Uint64(), Timestamp: block.Header.Time, BaseFee: block.Header.BaseFee, @@ -75,7 +76,7 @@ func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABl } // Encode serializes the DABlock into a slice of bytes. -func (b *DABlock) Encode() []byte { +func (b *DABlockV0) Encode() []byte { bytes := make([]byte, 60) binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) @@ -89,7 +90,7 @@ func (b *DABlock) Encode() []byte { } // Decode populates the fields of a DABlock from a byte slice. -func (b *DABlock) Decode(bytes []byte) error { +func (b *DABlockV0) Decode(bytes []byte) error { if len(bytes) != 60 { return errors.New("block encoding is not 60 bytes long") } @@ -104,9 +105,9 @@ func (b *DABlock) Decode(bytes []byte) error { return nil } -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - var blocks []*DABlock +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + var blocks []*DABlockV0 var txs [][]*types.TransactionData if chunk == nil { @@ -122,16 +123,20 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } for _, block := range chunk.Blocks { - b, err := NewDABlock(block, totalL1MessagePoppedBefore) + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { return nil, err } - blocks = append(blocks, b) + blockData, ok := b.(*DABlockV0) + if !ok { + return nil, errors.New("failed to cast block data") + } + blocks = append(blocks, blockData) totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) txs = append(txs, block.Transactions) } - daChunk := DAChunk{ + daChunk := DAChunkV0{ Blocks: blocks, Transactions: txs, } @@ -140,7 +145,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // Encode serializes the DAChunk into a slice of bytes. 
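Before moving on to the chunk encoding, it helps to see the fixed 60-byte block layout end to end. Offsets for the fields elided from the hunk above are inferred from the struct order and the 60-byte total:

    [0:8)    BlockNumber      uint64, big-endian
    [8:16)   Timestamp        uint64, big-endian
    [16:48)  BaseFee          32-byte big-endian value (zero-padded)
    [48:56)  GasLimit         uint64, big-endian
    [56:58)  NumTransactions  uint16, big-endian
    [58:60)  NumL1Messages    uint16, big-endian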
-func (c *DAChunk) Encode() ([]byte, error) { +func (c *DAChunkV0) Encode() ([]byte, error) { if len(c.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -165,7 +170,7 @@ func (c *DAChunk) Encode() ([]byte, error) { } var txLen [4]byte - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(txData, false /* no mock */) + rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { return nil, err } @@ -180,7 +185,7 @@ func (c *DAChunk) Encode() ([]byte, error) { } // Hash computes the hash of the DAChunk data. -func (c *DAChunk) Hash() (common.Hash, error) { +func (c *DAChunkV0) Hash() (common.Hash, error) { chunkBytes, err := c.Encode() if err != nil { return common.Hash{}, err @@ -222,15 +227,15 @@ func (c *DAChunk) Hash() (common.Hash, error) { return hash, nil } -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch) (*DABatch, error) { +// NewDABatch creates a DABatch from the provided Batch. +func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // compute batch data hash var dataBytes []byte totalL1MessagePoppedBeforeChunk := batch.TotalL1MessagePoppedBefore for _, chunk := range batch.Chunks { // build data hash - daChunk, err := NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) if err != nil { return nil, err } @@ -246,13 +251,13 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { dataHash := crypto.Keccak256Hash(dataBytes) // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } - daBatch := DABatch{ - Version: uint8(encoding.CodecV0), + daBatch := DABatchV0{ + Version: uint8(CodecV0), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, TotalL1MessagePopped: totalL1MessagePoppedAfter, @@ -265,12 +270,12 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // NewDABatchFromBytes decodes the given byte slice into a DABatch. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 89 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -284,7 +289,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { +func (b *DABatchV0) Encode() []byte { batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) @@ -297,20 +302,35 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV0) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } +// Blob returns the blob of the batch. +func (b *DABatchV0) Blob() *kzg4844.Blob { + return nil +} + +// BlobBytes returns the blob bytes of the batch. 
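For reference, Encode and NewDABatchFromBytes above agree on the following fixed 89-byte header, with the skipped-message bitmap trailing it; the offsets are exactly the slice bounds used in NewDABatchFromBytes:

    [0:1)    Version                uint8
    [1:9)    BatchIndex             uint64, big-endian
    [9:17)   L1MessagePopped        uint64, big-endian
    [17:25)  TotalL1MessagePopped   uint64, big-endian
    [25:57)  DataHash               common.Hash
    [57:89)  ParentBatchHash        common.Hash
    [89:)    SkippedL1MessageBitmap variable length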
+func (b *DABatchV0) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { var size uint64 for _, txData := range b.Transactions { if txData.Type == types.L1MessageTxType { continue } size += 4 // 4 bytes payload length - txPayloadLength, err := getTxPayloadLength(txData) + txPayloadLength, err := GetTxPayloadLength(txData) if err != nil { return 0, err } @@ -321,7 +341,7 @@ func EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -330,17 +350,17 @@ func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { continue } - txPayloadLength, err := getTxPayloadLength(txData) + txPayloadLength, err := GetTxPayloadLength(txData) if err != nil { return 0, err } - total += encoding.CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero - total += encoding.CalldataNonZeroByteGas * 4 // 4 bytes payload length - total += encoding.GetKeccak256Gas(txPayloadLength) // l2 tx hash + total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero + total += CalldataNonZeroByteGas * 4 // 4 bytes payload length + total += GetKeccak256Gas(txPayloadLength) // l2 tx hash } // 60 bytes BlockContext calldata - total += encoding.CalldataNonZeroByteGas * 60 + total += CalldataNonZeroByteGas * 60 // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -349,20 +369,20 @@ func EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. 
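Because v0 predates blob-carrying batches, the three blob accessors return nil rather than an error. Callers handling mixed versions can use that as the discriminator; a sketch only:

    package relayer

    import "github.com/scroll-tech/da-codec/encoding"

    // hasBlobSidecar reports whether a batch carries blob data to submit
    // alongside the commit transaction; v0 batches are calldata-only and
    // return a nil blob.
    func hasBlobSidecar(b encoding.DABatch) bool {
        return b.Blob() != nil
    }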
-func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, block := range c.Blocks { - blockL1CommitCalldataSize, err := EstimateBlockL1CommitCalldataSize(block) + blockL1CommitCalldataSize, err := o.EstimateBlockL1CommitCalldataSize(block) if err != nil { return 0, err } @@ -372,12 +392,12 @@ func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalTxNum uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { totalTxNum += uint64(len(block.Transactions)) - blockL1CommitGas, err := EstimateBlockL1CommitGas(block) + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) if err != nil { return 0, err } @@ -385,40 +405,40 @@ func EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += encoding.CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk - totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas, err := EstimateChunkL1CommitGas(chunk) + chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk) if err != nil { return 0, err } @@ -427,24 +447,24 @@ func EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - totalL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) + totalL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) } return totalL1CommitGas, nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
-func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { - chunkL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } @@ -453,10 +473,25 @@ func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { return totalL1CommitCalldataSize, nil } -func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(txData, false /* no mock */) - if err != nil { - return 0, err - } - return uint64(len(rlpTxData)), nil +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +func (o *DACodecV0) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return true, nil +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func (o *DACodecV0) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return true, nil } + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. +func (o *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + return 0, 0, nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. +func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + return 0, 0, nil +} + +// SetCompression enables or disables compression. 
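These stubs keep the v0 implementation usable behind the shared interface: the compatibility checks are vacuously true and the size estimates are zero, so version-agnostic pipelines degrade gracefully. A hypothetical call site, with maxBlobSize supplied by the caller:

    package relayer

    import "github.com/scroll-tech/da-codec/encoding"

    // fitsInBlob reports whether a batch's payload is expected to fit within
    // maxBlobSize; for codec v0 the estimate is zero, so it always fits.
    func fitsInBlob(codec encoding.Codec, b *encoding.Batch, maxBlobSize uint64) (bool, error) {
        compatible, err := codec.CheckBatchCompressedDataCompatibility(b)
        if err != nil || !compatible {
            return false, err
        }
        _, blobSize, err := codec.EstimateBatchL1CommitBatchSizeAndBlobSize(b)
        if err != nil {
            return false, err
        }
        return blobSize <= maxBlobSize, nil
    }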
+func (o *DACodecV0) SetCompression(enable bool) {} diff --git a/encoding/codecv0/codecv0_test.go b/encoding/codecv0/codecv0_test.go deleted file mode 100644 index 330a826..0000000 --- a/encoding/codecv0/codecv0_test.go +++ /dev/null @@ -1,597 +0,0 @@ -package codecv0 - -import ( - "encoding/hex" - "encoding/json" - "math/big" - "os" - "testing" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/log" - "github.com/stretchr/testify/assert" - - "github.com/scroll-tech/da-codec/encoding" -) - -func TestCodecV0(t *testing.T) { - glogger := log.NewGlogHandler(log.StreamHandler(os.Stderr, log.LogfmtFormat())) - glogger.Verbosity(log.LvlInfo) - log.Root().SetHandler(glogger) - - parentDABatch, err := NewDABatch(&encoding.Batch{ - Index: 0, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: common.Hash{}, - Chunks: nil, - }) - assert.NoError(t, err) - parentBatchHash := parentDABatch.Hash() - - block1 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block2 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block3 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block4 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block5 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block6 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - - blockL1CommitCalldataSize, err := EstimateBlockL1CommitCalldataSize(block1) - assert.NoError(t, err) - assert.Equal(t, uint64(298), blockL1CommitCalldataSize) - blockL1CommitGas, err := EstimateBlockL1CommitGas(block1) - assert.NoError(t, err) - assert.Equal(t, uint64(4900), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block2) - assert.NoError(t, err) - assert.Equal(t, uint64(5745), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block2) - assert.NoError(t, err) - assert.Equal(t, uint64(93613), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block3) - assert.NoError(t, err) - assert.Equal(t, uint64(96), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block3) - assert.NoError(t, err) - assert.Equal(t, uint64(4187), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block4) - assert.NoError(t, err) - assert.Equal(t, uint64(60), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block4) - assert.NoError(t, err) - assert.Equal(t, uint64(14020), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block5) - assert.NoError(t, err) - assert.Equal(t, uint64(60), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block5) - assert.NoError(t, err) - assert.Equal(t, uint64(8796), blockL1CommitGas) - blockL1CommitCalldataSize, err = EstimateBlockL1CommitCalldataSize(block6) - assert.NoError(t, err) - assert.Equal(t, uint64(60), blockL1CommitCalldataSize) - blockL1CommitGas, err = EstimateBlockL1CommitGas(block6) - assert.NoError(t, err) - assert.Equal(t, uint64(6184), blockL1CommitGas) - - // Test case: when the batch and chunk contains one block. 
- chunk := &encoding.Chunk{ - Blocks: []*encoding.Block{block1}, - } - chunkL1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(298), chunkL1CommitCalldataSize) - chunkL1CommitGas, err := EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(6042), chunkL1CommitGas) - - daChunk, err := NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err := daChunk.Encode() - assert.NoError(t, err) - chunkHexString := hex.EncodeToString(chunkBytes) - assert.Equal(t, 299, len(chunkBytes)) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e81840002000000000073f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8b00000073f87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", chunkHexString) - daChunkHash, err := daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0xde642c68122634b33fa1e6e4243b17be3bfd0dc6f996f204ef6d7522516bd840"), daChunkHash) - - batch := &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err := EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(298), batchL1CommitCalldataSize) - batchL1CommitGas, err := EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(162591), batchL1CommitGas) - - daBatch, err := NewDABatch(batch) - assert.NoError(t, err) - batchBytes := daBatch.Encode() - batchHexString := hex.EncodeToString(batchBytes) - assert.Equal(t, 89, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000000000000000000000008fbc5eecfefc5bd9d1618ecef1fed160a7838448383595a2257d4c9bd5c5fa3eb0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab0", batchHexString) - assert.Equal(t, 0, len(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(0), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(0), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xa906c7d2b6b68ea5fec3ff9d60d41858676e0d365e5d5ef07b2ce20fcf24ecd7"), daBatch.Hash()) - - decodedDABatch, err := NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes := decodedDABatch.Encode() - decodedBatchHexString := hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: when the batch and chunk contains two block. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block1, block2}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(6043), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(100742), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 6044, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x014916a83eccdb0d01e814b4d4ab90eb9049ba9a3cb0994919b86ad873bcd028"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(6043), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(257897), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 89, len(batchBytes)) - assert.Equal(t, "0000000000000000010000000000000000000000000000000074dd561a36921590926bee01fd0d53747c5f3e48e48a2d5538b9ab0e1511cfd7b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab0", batchHexString) - assert.Equal(t, 0, len(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(0), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(0), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xb02e39b740756824d20b2cac322ac365121411ced9d6e34de98a0b247c6e23e6"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: when the chunk contains one block with 1 L1MsgTx. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block3}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(96), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(5329), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - chunkHexString = hex.EncodeToString(chunkBytes) - assert.Equal(t, 97, len(chunkBytes)) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b00000020df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e1058080808080", chunkHexString) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x9e643c8a9203df542e39d9bfdcb07c99575b3c3d557791329fef9d83cc4147d0"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(96), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(161889), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000000b000000000000000b34f419ce7e882295bdb5aec6cce56ffa788a5fed4744d7fbd77e4acbf409f1cab0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab000000000000000000000000000000000000000000000000000000000000003ff", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap := "00000000000000000000000000000000000000000000000000000000000003ff" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(11), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(11), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xa18f07cb56ab4f2db5914d9b5699c5932bea4b5c73e71c8cec79151c11e9e986"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: batch contains multiple chunks, chunk contains multiple blocks. 
- chunk1 := &encoding.Chunk{ - Blocks: []*encoding.Block{block1, block2, block3}, - } - chunk1L1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk1) - assert.NoError(t, err) - assert.Equal(t, uint64(6139), chunk1L1CommitCalldataSize) - chunk1L1CommitGas, err := EstimateChunkL1CommitGas(chunk1) - assert.NoError(t, err) - assert.Equal(t, uint64(106025), chunk1L1CommitGas) - - daChunk1, err := NewDAChunk(chunk1, 0) - assert.NoError(t, err) - chunkBytes1, err := daChunk1.Encode() - assert.NoError(t, err) - assert.Equal(t, 6140, len(chunkBytes1)) - - chunk2 := &encoding.Chunk{ - Blocks: []*encoding.Block{block4}, - } - chunk2L1CommitCalldataSize, err := EstimateChunkL1CommitCalldataSize(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunk2L1CommitCalldataSize) - chunk2L1CommitGas, err := EstimateChunkL1CommitGas(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(15189), chunk2L1CommitGas) - - daChunk2, err := NewDAChunk(chunk2, 0) - assert.NoError(t, err) - chunkBytes2, err := daChunk2.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes2)) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk1, chunk2}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(6199), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(279054), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000002a000000000000002a1f9b3d942a6ee14e7afc52225c91fa44faa0a7ec511df9a2d9348d33bcd142fcb0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab00000000000000000000000000000000000000000000000000000001ffffffbff", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "0000000000000000000000000000000000000000000000000000001ffffffbff" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(42), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(42), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xf7bd6afe02764e4e6df23a374d753182b57fa77be71aaf1cd8365e15a51872d1"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many consecutive L1 Msgs in 1 bitmap, no leading skipped msgs. 
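
[Editor's note] These test cases pin down the skipped-L1-message bitmap layout: each 32-byte word covers 256 consecutive message-queue indices, and a set bit marks a skipped message. A hedged decoding sketch (the helper is hypothetical; bit 0 is the lowest-order bit, which is the convention the expected bitmaps in these tests imply):

```go
package encoding

import (
	"math/big"

	"github.com/scroll-tech/go-ethereum/common"
)

// skippedIndexes interprets one 32-byte bitmap word: bit i set means
// queue index poppedBefore+i was skipped.
func skippedIndexes(word []byte, poppedBefore uint64) []uint64 {
	bits := new(big.Int).SetBytes(word)
	var skipped []uint64
	for i := 0; i < 256; i++ {
		if bits.Bit(i) == 1 {
			skipped = append(skipped, poppedBefore+uint64(i))
		}
	}
	return skipped
}

// Example: the "...1ffffffbff" bitmap above sets bits 0-40 except bit 10,
// so of the 42 popped messages only queue indices 10 and 41 were included.
var _ = skippedIndexes(common.FromHex("0000000000000000000000000000000000000000000000000000001ffffffbff"), 0)
```
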
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block4}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(15189), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 37, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(171730), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "0000000000000000010000000000000005000000000000002ac62fb58ec2d5393e00960f1cc23cab883b685296efa03d13ea2dd4c6de79cc55b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab00000000000000000000000000000000000000000000000000000000000000000", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "0000000000000000000000000000000000000000000000000000000000000000" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(42), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(5), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0x841f4657b7eb723cae35377cf2963b51191edad6a3b182d4c8524cb928d2a413"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many consecutive L1 Msgs in 1 bitmap, with leading skipped msgs. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block4}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(15189), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(171810), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab00000000000000000000000000000000000000000000000000000001fffffffff", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "0000000000000000000000000000000000000000000000000000001fffffffff" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(42), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(42), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0xa28766a3617cf244cc397fc4ce4c23022ec80f152b9f618807ac7e7c11486612"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many sparse L1 Msgs in 1 bitmap. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block5}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(9947), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(166504), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 121, len(batchBytes)) - assert.Equal(t, "000000000000000001000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4db0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab000000000000000000000000000000000000000000000000000000000000001dd", batchHexString) - assert.Equal(t, 32, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "00000000000000000000000000000000000000000000000000000000000001dd" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(10), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(10), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0x2fee2073639eb9795007f7e765b3318f92658822de40b2134d34a478a0e9058a"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) - - // Test case: many L1 Msgs in each of 2 bitmaps. 
- chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block6}, - } - chunkL1CommitCalldataSize, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(60), chunkL1CommitCalldataSize) - chunkL1CommitGas, err = EstimateChunkL1CommitGas(chunk) - assert.NoError(t, err) - assert.Equal(t, uint64(7326), chunkL1CommitGas) - - daChunk, err = NewDAChunk(chunk, 0) - assert.NoError(t, err) - chunkBytes, err = daChunk.Encode() - assert.NoError(t, err) - assert.Equal(t, 61, len(chunkBytes)) - daChunkHash, err = daChunk.Hash() - assert.NoError(t, err) - assert.Equal(t, common.HexToHash("0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3"), daChunkHash) - - batch = &encoding.Batch{ - Index: 1, - TotalL1MessagePoppedBefore: 0, - ParentBatchHash: parentBatchHash, - Chunks: []*encoding.Chunk{chunk}, - } - - batchL1CommitCalldataSize, err = EstimateBatchL1CommitCalldataSize(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(60), batchL1CommitCalldataSize) - batchL1CommitGas, err = EstimateBatchL1CommitGas(batch) - assert.NoError(t, err) - assert.Equal(t, uint64(164388), batchL1CommitGas) - - daBatch, err = NewDABatch(batch) - assert.NoError(t, err) - batchBytes = daBatch.Encode() - batchHexString = hex.EncodeToString(batchBytes) - assert.Equal(t, 153, len(batchBytes)) - assert.Equal(t, "00000000000000000100000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208b0a62a3048a2e6efb4e56e471eb826de86f8ccaa4af27c572b68db6f687b3ab0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", batchHexString) - assert.Equal(t, 64, len(daBatch.SkippedL1MessageBitmap)) - expectedBitmap = "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000" - assert.Equal(t, expectedBitmap, common.Bytes2Hex(daBatch.SkippedL1MessageBitmap)) - assert.Equal(t, uint64(257), daBatch.TotalL1MessagePopped) - assert.Equal(t, uint64(257), daBatch.L1MessagePopped) - assert.Equal(t, common.HexToHash("0x84206bc6d0076a233fc7120a0bec4e03bf2512207437768828384dddb335ba2e"), daBatch.Hash()) - - decodedDABatch, err = NewDABatchFromBytes(batchBytes) - assert.NoError(t, err) - decodedBatchBytes = decodedDABatch.Encode() - decodedBatchHexString = hex.EncodeToString(decodedBatchBytes) - assert.Equal(t, batchHexString, decodedBatchHexString) -} - -func TestErrorPaths(t *testing.T) { - // Test case: when the chunk is nil. - _, err := NewDAChunk(nil, 100) - assert.Error(t, err) - assert.Contains(t, err.Error(), "chunk is nil") - - // Test case: when the chunk contains no blocks. - chunk := &encoding.Chunk{ - Blocks: []*encoding.Block{}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of blocks is 0") - - // Test case: when the chunk contains more than 255 blocks. - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{}, - } - for i := 0; i < 256; i++ { - chunk.Blocks = append(chunk.Blocks, &encoding.Block{}) - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of blocks exceeds 1 byte") - - // Test case: Header.Number is not a uint64. 
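
[Editor's note] The error-path tests around here pin the encoding's hard limits: the serialized chunk leads with a single count byte (hence at most 255 blocks), block numbers must fit in a uint64, and transaction counts per block must fit in a uint16. A compact restatement of the structural guards, mirroring the checks that survive in the merged `NewDAChunk` (the wrapper function is hypothetical):

```go
package encoding

import "errors"

// validateChunkShape restates the guards the deleted tests exercise.
func validateChunkShape(chunk *Chunk) error {
	if chunk == nil {
		return errors.New("chunk is nil")
	}
	if len(chunk.Blocks) == 0 {
		return errors.New("number of blocks is 0")
	}
	// the serialized chunk leads with a single count byte, hence <= 255 blocks
	if len(chunk.Blocks) > 255 {
		return errors.New("number of blocks exceeds 1 byte")
	}
	return nil
}
```
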
- block := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block.Header.Number = new(big.Int).Lsh(block.Header.Number, 64) - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "block number is not uint64") - - // Test case: number of transactions exceeds max uint16. - block = readBlockFromJSON(t, "../testdata/blockTrace_02.json") - for i := 0; i < 65537; i++ { - block.Transactions = append(block.Transactions, block.Transactions[0]) - } - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of transactions exceeds max uint16") - - // Test case: decode transaction with hex string without 0x prefix error. - block = readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block.Transactions = block.Transactions[:1] - block.Transactions[0].Data = "not-a-hex" - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = EstimateChunkL1CommitCalldataSize(chunk) - assert.Error(t, err) - assert.Contains(t, err.Error(), "hex string without 0x prefix") - _, err = EstimateChunkL1CommitGas(chunk) - assert.Error(t, err) - assert.Contains(t, err.Error(), "hex string without 0x prefix") - - // Test case: number of L1 messages exceeds max uint16. - block = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - for i := 0; i < 65535; i++ { - tx := &block.Transactions[i] - txCopy := *tx - txCopy.Nonce = uint64(i + 1) - block.Transactions = append(block.Transactions, txCopy) - } - chunk = &encoding.Chunk{ - Blocks: []*encoding.Block{block}, - } - _, err = NewDAChunk(chunk, 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "number of L1 messages exceeds max uint16") -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv1/codecv1.go b/encoding/codecv1.go similarity index 59% rename from encoding/codecv1/codecv1.go rename to encoding/codecv1.go index 154bb26..5a1d87d 100644 --- a/encoding/codecv1/codecv1.go +++ b/encoding/codecv1.go @@ -1,4 +1,4 @@ -package codecv1 +package encoding import ( "crypto/sha256" @@ -13,22 +13,21 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" ) -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = 15 +type DACodecV1 struct{} + +// Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv1MaxNumChunks = 15 -// DABlock represents a Data Availability Block. -type DABlock = codecv0.DABlock +// DABlockV1 represents a Data Availability Block. +type DABlockV1 = DABlockV0 -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk codecv0.DAChunk +// DAChunkV1 groups consecutive DABlocks with their transactions. +type DAChunkV1 DAChunkV0 -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { +// DABatchV1 contains metadata about a batch of DAChunks. 
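
[Editor's note] One subtlety in the type rewiring above: `DABlockV1` is declared as a type *alias*, so it is literally the same type as `DABlockV0` and the two are interchangeable with no conversion, whereas `DAChunkV1` is a new *defined* type, which copies `DAChunkV0`'s structure but can carry its own `Encode`/`Hash` method set:

```go
// Alias: *DABlockV1 and *DABlockV0 are the very same type.
type DABlockV1 = DABlockV0

// Definition: same fields as DAChunkV0, but a distinct type with
// its own methods.
type DAChunkV1 DAChunkV0
```
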
+type DABatchV1 struct { // header Version uint8 BatchIndex uint64 @@ -44,13 +43,13 @@ type DABatch struct { z *kzg4844.Point } -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv0.NewDABlock(block, totalL1MessagePoppedBefore) +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { + return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) } -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -59,20 +58,24 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []*DABlock + var blocks []*DABlockV1 var txs [][]*types.TransactionData for _, block := range chunk.Blocks { - b, err := NewDABlock(block, totalL1MessagePoppedBefore) + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) if err != nil { return nil, err } - blocks = append(blocks, b) + blockData, ok := b.(*DABlockV1) + if !ok { + return nil, errors.New("failed to cast block data") + } + blocks = append(blocks, blockData) totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) txs = append(txs, block.Transactions) } - daChunk := DAChunk{ + daChunk := DAChunkV1{ Blocks: blocks, Transactions: txs, } @@ -81,7 +84,7 @@ func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DACh } // Encode serializes the DAChunk into a slice of bytes. -func (c *DAChunk) Encode() []byte { +func (c *DAChunkV1) Encode() ([]byte, error) { var chunkBytes []byte chunkBytes = append(chunkBytes, byte(len(c.Blocks))) @@ -90,11 +93,11 @@ func (c *DAChunk) Encode() []byte { chunkBytes = append(chunkBytes, blockBytes...) } - return chunkBytes + return chunkBytes, nil } // Hash computes the hash of the DAChunk data. -func (c *DAChunk) Hash() (common.Hash, error) { +func (c *DAChunkV1) Hash() (common.Hash, error) { var dataBytes []byte // concatenate block contexts @@ -127,10 +130,10 @@ func (c *DAChunk) Hash() (common.Hash, error) { return hash, nil } -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch) (*DABatch, error) { +// NewDABatch creates a DABatch from the provided Batch. 
+func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) {
 	// this encoding can only support a fixed number of chunks per batch
-	if len(batch.Chunks) > MaxNumChunks {
+	if len(batch.Chunks) > Codecv1MaxNumChunks {
 		return nil, errors.New("too many chunks in batch")
 	}
@@ -139,25 +142,25 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
 
 	// skipped L1 messages bitmap
-	bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
 
 	// blob payload
-	blob, blobVersionedHash, z, err := constructBlobPayload(batch.Chunks, false /* no mock */)
+	blob, blobVersionedHash, z, err := o.constructBlobPayload(batch.Chunks, false /* no mock */)
 	if err != nil {
 		return nil, err
 	}
 
-	daBatch := DABatch{
-		Version:              uint8(encoding.CodecV1),
+	daBatch := DABatchV1{
+		Version:              uint8(CodecV1),
 		BatchIndex:           batch.Index,
 		L1MessagePopped:      totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore,
 		TotalL1MessagePopped: totalL1MessagePoppedAfter,
@@ -176,12 +179,12 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
-func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+func (o *DACodecV1) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
 	var dataBytes []byte
 	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
 
 	for _, chunk := range chunks {
-		daChunk, err := NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
 		if err != nil {
 			return common.Hash{}, err
 		}
@@ -198,16 +201,16 @@ func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
 }
 
 // constructBlobPayload constructs the 4844 blob payload.
-func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 + metadataLength := 2 + Codecv1MaxNumChunks*4 // the raw (un-padded) blob payload blobBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+Codecv1MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -227,7 +230,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, err } @@ -245,10 +248,10 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than MaxNumChunks chunks, the rest + // if we have fewer than Codecv1MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < Codecv1MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -258,7 +261,7 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[0:], hash[:]) // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, err } @@ -271,11 +274,11 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+Codecv1MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -288,12 +291,12 @@ func constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. 
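
[Editor's note] The blob construction above fixes a small metadata region at the front of the payload. Spelled out, the layout the code assumes is as follows (the constant name is hypothetical; field endianness is inferred from the big-endian batch header encoding and is worth verifying against the source):

```go
// Blob payload prefix consumed by constructBlobPayload:
//   bytes [0:2)              uint16 num_chunks
//   bytes [2+4*i : 2+4*i+4)  uint32 size of chunk i
// Unused chunk-size slots remain zero.
const blobMetadataLength = 2 + Codecv1MaxNumChunks*4 // 2 + 15*4 = 62 bytes
```
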
-func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV1{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -308,7 +311,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { +func (b *DABatchV1) Encode() []byte { batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) @@ -322,13 +325,13 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV1) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // BlobDataProof computes the abi-encoded blob verification data. -func (b *DABatch) BlobDataProof() ([]byte, error) { +func (b *DABatchV1) BlobDataProof() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProof with empty blob") } @@ -352,7 +355,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := encoding.GetBlobDataProofArgs() + blobDataProofArgs, err := GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -360,35 +363,45 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { +func (b *DABatchV1) Blob() *kzg4844.Blob { return b.blob } +// BlobBytes returns the blob bytes of the batch. +func (b *DABatchV1) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + // EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk. -func EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) { - metadataSize := uint64(2 + 4*MaxNumChunks) // over-estimate: adding metadata length - chunkDataSize, err := chunkL1CommitBlobDataSize(c) +func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *Chunk) (uint64, error) { + metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) // over-estimate: adding metadata length + chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) if err != nil { return 0, err } - return encoding.CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil + return CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil } // EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. 
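
[Editor's note] The 121-byte minimum enforced in `NewDABatchFromBytes` above is the fixed header size; everything past it is the variable-length skipped-message bitmap. A hedged sketch of the layout (offsets beyond 17 are inferred from the symmetric `Encode` path; the constant name is hypothetical):

```go
// DABatchV1 header layout (121 fixed bytes, then the bitmap):
//   data[0]        Version
//   data[1:9]      BatchIndex           (big-endian uint64)
//   data[9:17]     L1MessagePopped      (big-endian uint64)
//   data[17:25]    TotalL1MessagePopped (big-endian uint64)
//   data[25:57]    DataHash
//   data[57:89]    BlobVersionedHash
//   data[89:121]   ParentBatchHash
//   data[121:]     SkippedL1MessageBitmap
const daBatchV1FixedHeaderLength = 121
```
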
-func EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) { - metadataSize := uint64(2 + 4*MaxNumChunks) +func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *Batch) (uint64, error) { + metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) var batchDataSize uint64 for _, c := range b.Chunks { - chunkDataSize, err := chunkL1CommitBlobDataSize(c) + chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) if err != nil { return 0, err } batchDataSize += chunkDataSize } - return encoding.CalculatePaddedBlobSize(metadataSize + batchDataSize), nil + return CalculatePaddedBlobSize(metadataSize + batchDataSize), nil } -func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { +func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { var dataSize uint64 for _, block := range c.Blocks { for _, tx := range block.Transactions { @@ -396,7 +409,7 @@ func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { continue } - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) if err != nil { return 0, err } @@ -407,7 +420,7 @@ func chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { +func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -418,7 +431,7 @@ func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { } // 60 bytes BlockContext calldata - total += encoding.CalldataNonZeroByteGas * 60 + total += CalldataNonZeroByteGas * 60 // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -427,84 +440,122 @@ func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl - return total + return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return uint64(60 * len(c.Blocks)) +func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return uint64(60 * len(c.Blocks)), nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
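
[Editor's note] For reference, the per-block gas model in `EstimateBlockL1CommitGas` above decomposes as follows; this is a restatement of the accounting in the diff, not new behavior (the wrapper function name is hypothetical):

```go
// estimateBlockL1CommitGasModel restates the model above for a block
// with n L1 messages.
func estimateBlockL1CommitGasModel(n uint64) uint64 {
	gas := CalldataNonZeroByteGas * 60        // 60-byte BlockContext calldata
	gas += n * (2100 + 2600 + 100 + 100)      // queue: cold sload, cold access, call, warm access
	gas += n * (100 + 100 + 100)              // proxy: read admin, read impl, access impl
	gas += n * 2 * GetMemoryExpansionCost(36) // staticcall to proxy + delegatecall to impl
	return gas
}
```
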
-func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { +func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() - blockL1CommitGas := EstimateBlockL1CommitGas(block) + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } totalL1CommitGas += blockL1CommitGas } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash - return totalL1CommitGas + totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { +func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore for _, chunk := range b.Chunks { - chunkL1CommitGas := EstimateChunkL1CommitGas(chunk) + chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk) + if err != nil { + return 0, err + } totalL1CommitGas += chunkL1CommitGas totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * 
(totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) - totalL1CommitCalldataSize := EstimateChunkL1CommitCalldataSize(chunk) - totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) + var totalL1CommitCalldataSize uint64 + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) } - return totalL1CommitGas + return totalL1CommitGas, nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { +func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { - totalL1CommitCalldataSize += EstimateChunkL1CommitCalldataSize(chunk) + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize } - return totalL1CommitCalldataSize + return totalL1CommitCalldataSize, nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +func (o *DACodecV1) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return true, nil } + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return true, nil +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. +func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + return 0, 0, nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. +func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + return 0, 0, nil +} + +// SetCompression enables or disables compression. 
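
[Editor's note] The recurring `(totalL1MessagePoppedInChunk + 255) / 256` in the batch gas estimator above is integer ceiling division: it yields the number of 256-bit bitmap words needed to cover the chunk's L1 messages, each word contributing 32 bytes to both the calldata and the hashed header.

```go
// bitmapWordsFor returns ceil(n/256), the number of 256-bit bitmap words
// covering n L1 messages (hypothetical helper naming).
func bitmapWordsFor(n uint64) uint64 {
	return (n + 255) / 256
}

// Each word is one 32-byte bitmap entry in calldata and in the keccak input.
```
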
+func (o *DACodecV1) SetCompression(enable bool) {} diff --git a/encoding/codecv1/codecv1_test.go b/encoding/codecv1/codecv1_test.go deleted file mode 100644 index b914ed6..0000000 --- a/encoding/codecv1/codecv1_test.go +++ /dev/null @@ -1,892 +0,0 @@ -package codecv1 - -import ( - "encoding/hex" - "encoding/json" - "os" - "strings" - "testing" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/stretchr/testify/assert" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" -) - -func TestCodecV1BlockEncode(t *testing.T) { - block := &DABlock{} - encoded := hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - - // sanity check: v0 and v1 block encodings are identical - for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { - blockv0, err := codecv0.NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv0 := hex.EncodeToString(blockv0.Encode()) - - blockv1, err := NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv1 := hex.EncodeToString(blockv1.Encode()) - - assert.Equal(t, encodedv0, encodedv1) - } -} - -func TestCodecV1ChunkEncode(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := 
hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) -} - -func TestCodecV1ChunkHash(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - hash, err := chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) - - // L1 transactions are part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: 
"0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) - _, err = chunk.Hash() - assert.Error(t, err) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) -} - -func TestCodecV1BatchEncode(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV1)} - encoded := hex.EncodeToString(batch.Encode()) - assert.Equal(t, 
"01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc75530000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f94110000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b401a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = 
&encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "01000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d520801a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a60000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8014ae5927a983081a8bcdbcce19e926c9e4c56e2dc89c91c32c034b875b8a1ca00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "010000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e13476701b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) -} - -func TestCodecV1BatchHash(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV1)} - assert.Equal(t, "0x4b6fe410f63051f6e93532087b42ece79fb7b966e2ba5845e6cd1c091f27e564", batch.Hash().Hex()) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xd557b02638c0385d5124f7fc188a025b33f8819b7f78c000751404997148ab8b", batch.Hash().Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xf13c7e249d00941c59fe4cd970241bbd6753eede8e043c438165674031792b3b", batch.Hash().Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xb64208f07fab641f7ebf831686d05ad667da0c7bfabcbd9c878cc22cbc8032fd", batch.Hash().Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x4f7426d164e885574a661838406083f5292b0a1bc6dc20c51129eed0723b8a27", batch.Hash().Hex()) - - trace6 := readBlockFromJSON(t, 
"../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xfce89ec2aed85cebeb20eea722e3ae4ec622bff49218dbe249a2d358e2e85451", batch.Hash().Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x8fc063179b709bab338674278bb7b70dce2879a4e11ea857b3a202fb3313559f", batch.Hash().Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xf1c94cdf45967bc60bfccd599edd8cb07fd0201f41ab068637834f86140f62bf", batch.Hash().Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xfef0b56bd889529e3a1d884c88dd1c867e084fdc1369496907be8f865f43f0e0", batch.Hash().Hex()) -} - -func TestCodecV1BatchDataHash(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = 
NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) -} - -func TestCodecV1BatchBlob(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, - // metadata - "00"+"0001"+"000000e6"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00"+"00"+"000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+"00000000"+ - // tx payload - "00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1", encoded) - assert.Equal(t, "0x01af944924715b48be6ce3c35aef7500a50e909265599bd2b3e544ac59fc7553", batch.BlobVersionedHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, 
"000001000016310000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002f9162d82cf5502843b9b0a17843b9b0a17831197e28080b915d26080604000523480156200001157600080fd5b50604051620014b2380380620014b283390081810160405260a08110156200003757600080fd5b8151602083015160408000850180519151939592948301929184640100000000821115620000635760000080fd5b9083019060208201858111156200007957600080fd5b8251640100000000008111828201881017156200009457600080fd5b8252508151602091820100929091019080838360005b83811015620000c357818101518382015260200100620000a9565b50505050905090810190601f168015620000f1578082038051006001836020036101000a031916815260200191505b5060405260200180516000405193929190846401000000008211156200011557600080fd5b908301906000208201858111156200012b57600080fd5b8251640100000000811182820188001017156200014657600080fd5b8252508151602091820192909101908083830060005b83811015620001755781810151838201526020016200015b565b5050005050905090810190601f168015620001a3578082038051600183602003610100000a031916815260200191505b506040526020908101518551909350859250008491620001c8916003918501906200026b565b508051620001de906004906000208401906200026b565b50506005805461ff001960ff199091166012171690005550600680546001600160a01b038088166001600160a01b031992831617900092556007805492871692909116919091179055620002308162000255565b5000506005805462010000600160b01b031916336201000002179055506200030700915050565b6005805460ff191660ff92909216919091179055565b82805460000181600116156101000203166002900490600052602060002090601f01602000900481019282601f10620002ae57805160ff1916838001178555620002de56005b82800160010185558215620002de579182015b82811115620002de57825100825591602001919060010190620002c1565b50620002ec929150620002f056005b5090565b5b80821115620002ec5760008155600101620002f1565b61119b0080620003176000396000f3fe608060405234801561001057600080fd5b50600004361061010b5760003560e01c80635c975abb116100a257806395d89b41110061007157806395d89b41146103015780639dc29fac14610309578063a457c200d714610335578063a9059cbb14610361578063dd62ed3e1461038d5761010b00565b80635c975abb1461029d57806370a08231146102a55780638456cb5914006102cb5780638e50817a146102d35761010b565b8063313ce567116100de57008063313ce5671461021d578063395093511461023b5780633f4ba83a146102006757806340c10f19146102715761010b565b806306fdde031461011057806300095ea7b31461018d57806318160ddd146101cd57806323b872dd146101e757005b600080fd5b6101186103bb565b604080516020808252835181830152835100919283929083019185019080838360005b838110156101525781810151838200015260200161013a565b50505050905090810190601f16801561017f578082000380516001836020036101000a031916815260200191505b50925050506040005180910390f35b6101b9600480360360408110156101a357600080fd5b50600001600160a01b038135169060200135610451565b60408051911515825251900081900360200190f35b6101d561046e565b6040805191825251908190036020000190f35b6101b9600480360360608110156101fd57600080fd5b50600160010060a01b03813581169160208101359091169060400135610474565b610225610004fb565b6040805160ff9092168252519081900360200190f35b6101b9600400803603604081101561025157600080fd5b506001600160a01b03813516906000200135610504565b61026f610552565b005b61026f600480360360408110150061028757600080fd5b506001600160a01b0381351690602001356105a9565b006101b9610654565b6101d5600480360360208110156102bb57600080fd5b5000356001600160a01b0316610662565b61026f61067d565b61026f60048036030060408110156102e957600080fd5b506001600160a01b0381358116916020010035166106d2565b610118610757565b61026f6004803603604081101561031f0057600080fd5b506001600160a01b0381351690602001356107b8565b6101b9006004803603604081101561034b576000
80fd5b506001600160a01b0381351600906020013561085f565b6101b96004803603604081101561037757600080fd005b506001600160a01b0381351690602001356108c7565b6101d560048036030060408110156103a357600080fd5b506001600160a01b0381358116916020010035166108db565b60038054604080516020601f600260001961010060018816001502019095169490940493840181900481028201810190925282815260609300909290918301828280156104475780601f1061041c5761010080835404028300529160200191610447565b820191906000526020600020905b8154815290600001019060200180831161042a57829003601f168201915b505050505090509000565b600061046561045e610906565b848461090a565b50600192915050565b0060025490565b60006104818484846109f6565b6104f18461048d610906565b006104ec8560405180606001604052806028815260200161108560289139600100600160a01b038a166000908152600160205260408120906104cb610906565b006001600160a01b031681526020810191909152604001600020549190610b5100565b61090a565b5060019392505050565b60055460ff1690565b600061046500610511610906565b846104ec8560016000610522610906565b6001600160a0001b03908116825260208083019390935260409182016000908120918c16815200925290205490610be8565b6007546001600160a01b0316331461059f57604000805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0818005b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a7610c0049565b565b600554610100900460ff16156105f9576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610646576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610ced565b5050565b600554610100900460ff001690565b6001600160a01b031660009081526020819052604090205490565b006007546001600160a01b031633146106ca576040805162461bcd60e51b81520060206004820152600b60248201526a1b9bdd08185b1b1bddd95960aa1b60440082015290519081900360640190fd5b6105a7610ddd565b600554620100009000046001600160a01b03163314610726576040805162461bcd60e51b81526020006004820152600c60248201526b6f6e6c7920466163746f727960a01b60448200015290519081900360640190fd5b600780546001600160a01b03928316600100600160a01b0319918216179091556006805493909216921691909117905556005b60048054604080516020601f600260001961010060018816150201909516009490940493840181900481028201810190925282815260609390929091830100828280156104475780601f1061041c5761010080835404028352916020019100610447565b600554610100900460ff1615610808576040805162461bcd60e5001b815260206004820152601060248201526f14185d5cd8589b194e881c185d005cd95960821b604482015290519081900360640190fd5b600654600160016000a01b03163314610855576040805162461bcd60e51b81526020600482015260000b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908190000360640190fd5b6106508282610e65565b600061046561086c610906565b84006104ec85604051806060016040528060258152602001611117602591396001006000610896610906565b6001600160a01b0390811682526020808301939093005260409182016000908120918d16815292529020549190610b51565b6000610004656108d4610906565b84846109f6565b6001600160a01b0391821660009000815260016020908152604080832093909416825291909152205490565b339000565b6001600160a01b03831661094f5760405162461bcd60e51b8152600401008080602001828103825260248152602001806110f3602491396040019150500060405180910390fd5b6001600160a01b0382166109945760405162461bcd6000e51b815260040180806020018281038252602281526020018061103d602291003960400191505060405180910390fd5b6001600160a01b038084166000818100526001602090815260408083209487168084529482529182902085905581510085815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b00200ac8c7c3b9259281900390910190a3505050565b6001600160a01b03831600610
a3b5760405162461bcd60e51b8152600401808060200182810382526025008152602001806110ce6025913960400191505060405180910390fd5b600160000160a01b038216610a805760405162461bcd60e51b815260040180806020010082810382526023815260200180610ff8602391396040019150506040518091000390fd5b610a8b838383610f61565b610ac8816040518060600160405280600026815260200161105f602691396001600160a01b038616600090815260208100905260409020549190610b51565b6001600160a01b03808516600090815260002081905260408082209390935590841681522054610af79082610be8565b600001600160a01b03808416600081815260208181526040918290209490945580005185815290519193928716927fddf252ad1be2c89b69c2b068fc378daa952b00a7f163c4a11628f55a4df523b3ef92918290030190a3505050565b6000818400841115610be05760405162461bcd60e51b8152600401808060200182810382005283818151815260200191508051906020019080838360005b83811015610b00a5578181015183820152602001610b8d565b50505050905090810190601f16008015610bd25780820380516001836020036101000a03191681526020019150005b509250505060405180910390fd5b505050900390565b60008282018381100015610c42576040805162461bcd60e51b815260206004820152601b6024820100527f536166654d6174683a206164646974696f6e206f766572666c6f77000000000000604482015290519081900360640190fd5b9392505050565b60055461000100900460ff16610c9c576040805162461bcd60e51b81526020600482015200601460248201527314185d5cd8589b194e881b9bdd081c185d5cd95960621b00604482015290519081900360640190fd5b6005805461ff00191690557f5db900ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa61000cd0610906565b604080516001600160a01b03909216825251908190036020000190a1565b6001600160a01b038216610d48576040805162461bcd60e51b81005260206004820152601f60248201527f45524332303a206d696e7420746f2000746865207a65726f20616464726573730060448201529051908190036064010090fd5b610d5460008383610f61565b600254610d619082610be8565b600255006001600160a01b038216600090815260208190526040902054610d87908261000be8565b6001600160a01b038316600081815260208181526040808320949000945583518581529351929391927fddf252ad1be2c89b69c2b068fc378daa95002ba7f163c4a11628f55a4df523b3ef9281900390910190a35050565b60055400610100900460ff1615610e2d576040805162461bcd60e51b81526020600482000152601060248201526f14185d5cd8589b194e881c185d5cd95960821b60440082015290519081900360640190fd5b6005805461ff0019166101001790557f0062e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc544b05a20058610cd0610906565b6001600160a01b038216610eaa5760405162461bcd6000e51b81526004018080602001828103825260218152602001806110ad602191003960400191505060405180910390fd5b610eb682600083610f61565b610ef3008160405180606001604052806022815260200161101b60229139600160016000a01b0385166000908152602081905260409020549190610b51565b600160010060a01b038316600090815260208190526040902055600254610f199082610f00b5565b6002556040805182815290516000916001600160a01b038516917fdd00f252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef009181900360200190a35050565b610f6c838383610fb0565b610f7461065456005b15610fb05760405162461bcd60e51b81526004018080602001828103825200602a81526020018061113c602a913960400191505060405180910390fd5b50005050565b6000610c4283836040518060400160405280601e81526020017f53006166654d6174683a207375627472616374696f6e206f766572666c6f77000000815250610b5156fe45524332303a207472616e7366657220746f2074686520007a65726f206164647265737345524332303a206275726e20616d6f756e742000657863656564732062616c616e636545524332303a20617070726f76652074006f20746865207a65726f206164647265737345524332303a207472616e736600657220616d6f756e7420657863656564732062616c616e636545524332303a00207472616e7366657220616d6f756e74206578636565647320616c6c6f7761006e636545524332303a206275726e2066726f6d
20746865207a65726f20616400647265737345524332303a207472616e736665722066726f6d20746865207a0065726f206164647265737345524332303a20617070726f76652066726f6d2000746865207a65726f206164647265737345524332303a206465637265617365006420616c6c6f77616e63652062656c6f77207a65726f4552433230506175730061626c653a20746f6b656e207472616e73666572207768696c652070617573006564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c3bed0057770f402e9a7b7eeda0265d4c64736f6c634300060c0033000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b63000000000000000000000000001c5a77d9fa7ef466951b2f01f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e0000000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000009570045544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e173700f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bd00a52095d44b8a9af7", encoded) - assert.Equal(t, "0x010c54fa675ed1b78f269827177019b0814a4ac4d269c68037e2c41cf08f9411", batch.BlobVersionedHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0000010000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080008", encoded) - assert.Equal(t, "0x01ea66c4de196d36e2c3a5d7c0045100b9e46ef65be8f7a921ef20e6f2e99ebd", batch.BlobVersionedHash.Hex()) - - // this batch only contains L1 txs - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001", encoded) - assert.Equal(t, "0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", batch.BlobVersionedHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001", encoded) - assert.Equal(t, "0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", batch.BlobVersionedHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "000001", encoded) - assert.Equal(t, "0x01a327088bb2b13151449d8313c281d0006d12e8453e863637b746898b6ad5a6", batch.BlobVersionedHash.Hex()) - - // 15 chunks - 
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t,
-		// metadata
-		"00"+"000f"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"00"+"00"+"0000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+"000000e6"+
-		// tx payload
-		"00f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea003f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ece00a0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86d00f514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288b00baf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf000d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f0010c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f002b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b009aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d0002c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b00219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d199600b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a120940100bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000800083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393e00b095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f87938000aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b600e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae9900c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cb00d19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8007101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce941100ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b002cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec005bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e830700a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de10200513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c57008fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e900a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea000f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f7730016a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6e00ed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2ade00ceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7b00a5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd7300e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9a00ec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d0200c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c400c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808300019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a0041e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfa00ec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc060015b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03998586600d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e0081065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f8710080843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca2008a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e9000cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c004d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a100209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e260004393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f00879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6a00cb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab0007ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df51400a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf4002a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d6900ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d8256f10c100be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b460004bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec002e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c700e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b001de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b500243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae600bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14af6000000808301009ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb09500b7f77316a05a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac100c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb009e2adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67a00a78e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19f00eacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710100843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a00152d02c7e14af60000008083019ecea0f039985866d8256f10c1be4f7b2cac00e28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd400aba684835996fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a1200094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d0056548a41e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd700f51bfaec288bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03f00b2bc0615b1bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e008307a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e1004af60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bd00e27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483590096fc3f879380aac1c09c6eed32f1f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf87101843b9aec2e8307a1209401bae6bf68e9a03fb2bc0615b100bf0d69ce9411ed8a152d02c7e14af60000008083019ecea0f039985866d825006f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e8106005f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f1", encoded)
-	assert.Equal(t, "0x01521b20f341588dea5978efb00d7b077a986598a6001fc2e5859d77f3ffc284", batch.BlobVersionedHash.Hex())
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0000020000173700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf87101843b9aec2e830007a1209401bae6bf68e9a03fb2bc0615b1bf0d69ce9411ed8a152d02c7e14a00f60000008083019ecea0f039985866d8256f10c1be4f7b2cace28d8f20bde2007e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483599600fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a17843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380620014b2833981810160405260a0811015620000375760000080fd5b81516020830151604080850180519151939592948301929184640100000000008211156200006357600080fd5b908301906020820185811115620000007957600080fd5b8251640100000000811182820188101715620000945760000080fd5b82525081516020918201929091019080838360005b8381101562000000c3578181015183820152602001620000a9565b50505050905090810190601f00168015620000f15780820380516001836020036101000a03191681526020010091505b5060405260200180516040519392919084640100000000821115620000011557600080fd5b9083019060208201858111156200012b57600080fd5b8200516401000000008111828201881017156200014657600080fd5b8252508151006020918201929091019080838360005b8381101562000175578181015183820001526020016200015b565b50505050905090810190601f168015620001a3570080820380516001836020036101000a031916815260200191505b506040526000209081015185519093508592508491620001c8916003918501906200026b56005b508051620001de9060049060208401906200026b565b50506005805461ff00001960ff1990911660121716905550600680546001600160a01b03808816600001600160a01b031992831617909255600780549287169290911691909117900055620002308162000255565b50506005805462010000600160b01b031916330062010000021790555062000307915050565b6005805460ff191660ff9290920016919091179055565b82805460018160011615610100020316600290049060000052602060002090601f016020900481019282601f10620002ae57805160ff001916838001178555620002de565b82800160010185558215620002de57918200015b82811115620002de578251825591602001919060010190620002c1565b0050620002ec929150620002f0565b5090565b5b80821115620002ec576000810055600101620002f1565b61119b80620003176000396000f3fe60806040523400801561001057600080fd5b506004361061010b5760003560e01c80635c975a00bb116100a257806395d89b411161007157806395d89b41146103015780639d00c29fac14610309578063a457c2d714610335578063a9059cbb1461036157800063dd62ed3e1461038d5761010b565b80635c975abb1461029d57806370a0820031146102a55780638456cb59146102cb5780638e50817a146102d35761010b00565b8063313ce567116100de578063313ce5671461021d57806339509351140061023b5780633f4ba83a1461026757806340c10f19146102715761010b565b00806306fdde0314610110578063095ea7b31461018d57806318160ddd14610100cd57806323b872dd146101e7575b600080fd5b6101186103bb565b604080510060208082528351818301528351919283929083019185019080838360005b830081101561015257818101518382015260200161013a565b5050505090509081000190601f16801561017f5780820380516001836020036101000a03191681520060200191505b509250505060405180910390f35b6101b960048036036040810010156101a357600080fd5b506001600160a01b03813516906020013561045100565b604080519115158252519081900360200190f35b6101d561046e565b6000408051918252519081900360200190f35b6101b960048036036060811015610001fd57600080fd5b506001600160a01b0381358116916020810135909116900060400135610474565b6102256104fb565b6040805160ff909216825251908100900360200190f35b6101b96004803603604081101561025157600080fd5b50006001600160a01b038135169060200135610504565b61026f610552565b005b0061026f6004803603604081101561028757600080fd5b506001600160a01b030081351690602001356105a9565b6101b9610654565b6101d560048036036020008110156102bb57600080fd5b50356001600160a01b0316610662565b61026f0061067d565b61026f600480360360408110156102e957600080fd5b50600160000160a01b03813581169160200135166106d2565b610118610757565b61026f006004803603604081101561031f57600080fd5b506001600160a01b038135160090602001356107b8565b6101b96004803603604081101561034b57600080fd005b506001600160a01b03813516906020013561085f565b6101b9600480360300604081101561037757600080fd5b506001600160a01b038135169060200135006108c7565b6101d5600480360360408110156103a357600080fd5b50600160000160a01b03813581169160200135166108db565b60038054604080516020601f6002600019610100600188161502019095169490940493840181900481020082018101909252828152606093909290918301828280156104475780601f100061041c57610100808354040283529160200191610447565b82019190600052006020600020905b81548152906001019060200180831161042a57829003601f00168201915b5050505050905090565b600061046561045e610906565b84846100090a565b50600192915050565b60025490565b60006104818484846109f656005b6104f18461048d610906565b6104ec8560405180606001604052806028810052602001611085602891396001600160a01b038a16600090815260016020520060408120906104cb610906565b6001600160a01b03168152602081019190910052604001600020549190610b51565b61090a565b5060019392505050565b6000055460ff1690565b6000610465610511610906565b846104ec856001600061000522610906565b6001600160a01b0390811682526020808301939093526040009182016000908120918c168152925290205490610be8565b600754600160010060a01b0316331461059f576040805162461bcd60e51b81526020600482015200600b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529051908100900360640190fd5b6105a7610c49565b565b600554610100900460ff1615610005f9576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610646576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610ced565b500050565b600554610100900460ff1690565b6001600160a01b03166000908152006020819052604090205490565b6007546001600160a01b031633146106ca57006040805162461bcd60e51b815260206004820152600b60248201526a1b9bdd0008185b1b1bddd95960aa1b604482015290519081900360640190fd5b6105a700610ddd565b6005546201000090046001600160a01b0316331461072657604000805162461bcd60e51b815260206004820152600c60248201526b6f6e6c792000466163746f727960a01b604482015290519081900360640190fd5b60078054006001600160a01b039283166001600160a01b0319918216179091556006805400939092169216919091179055565b60048054604080516020601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c5761010000808354040283529160200191610447565b600554610100900460ff161561000808576040805162461bcd60e51b815260206004820152601060248201526f0014185d5cd8589b194e881c185d5cd95960821b60448201529051908190036000640190fd5b6006546001600160a01b03163314610855576040805162461bcd0060e51b815260206004820152600b60248201526a1b9bdd08185b1b1bddd9590060aa1b604482015290519081900360640190fd5b6106508282610e65565b60000061046561086c610906565b846104ec85604051806060016040528060258100526020016111176025913960016000610896610906565b6001600160a01b0300908116825260208083019390935260409182016000908120918d1681529252009020549190610b51565b60006104656108d4610906565b84846109f6565b600001600160a01b0391821660009081526001602090815260408083209390941600825291909152205490565b3390565b6001600160a01b03831661094f576040005162461bcd60e51b8152600401808060200182810382526024815260200180006110f36024913960400191505060405180910390fd5b6001600160a01b038200166109945760405162461bcd60e51b81526004018080602001828103825260002281526020018061103d6022913960400191505060405180910390fd5b600100600160a01b0380841660008181526001602090815260408083209487168084005294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f7142007d1e84f3dd0314c0f7b2291e5b200ac8c7c3b9259281900390910190a350500050565b6001600160a01b038316610a3b5760405162461bcd60e51b8152600400018080602001828103825260258152602001806110ce602591396040019150005060405180910390fd5b6001600160a01b038216610a805760405162461bcd0060e51b8152600401808060200182810382526023815260200180610ff8602300913960400191505060405180910390fd5b610a8b838383610f61565b610ac8008160405180606001604052806026815260200161105f60269139600160016000a01b0386166000908152602081905260409020549190610b51565b600160010060a01b03808516600090815260208190526040808220939093559084168152002054610af79082610be8565b6001600160a01b03808416600081815260208100815260409182902094909455805185815290519193928716927fddf252ad1b00e2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef9291829000030190a3505050565b60008184841115610be05760405162461bcd60e51b810052600401808060200182810382528381815181526020019150805190602001009080838360005b83811015610ba5578181015183820152602001610b8d565b0050505050905090810190601f168015610bd2578082038051600183602003610001000a031916815260200191505b509250505060405180910390fd5b50505000900390565b600082820183811015610c42576040805162461bcd60e51b81520060206004820152601b60248201527f536166654d6174683a20616464697469006f6e206f766572666c6f77000000000060448201529051908190036064019000fd5b9392505050565b600554610100900460ff16610c9c576040805162461b00cd60e51b815260206004820152601460248201527314185d5cd8589b194e88001b9bdd081c185d5cd95960621b604482015290519081900360640190fd5b600005805461ff00191690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a500e8aa4e537bd38aeae4b073aa610cd0610906565b604080516001600160a01b00039092168252519081900360200190a1565b6001600160a01b038216610d4800576040805162461bcd60e51b815260206004820152601f60248201527f4552004332303a206d696e7420746f20746865207a65726f20616464726573730060004482015290519081900360640190fd5b610d5460008383610f61565b60025400610d619082610be8565b6002556001600160a01b03821660009081526020810090526040902054610d879082610be8565b6001600160a01b038316600081810052602081815260408083209490945583518581529351929391927fddf252ad001be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef928190000390910190a35050565b600554610100900460ff1615610e2d57604080516200461bcd60e51b815260206004820152601060248201526f14185d5cd8589b19004e881c185d5cd95960821b604482015290519081900360640190fd5b600580005461ff0019166101001790557f62e78cea01bee320cd4e420270b5ea74000d0011b0c9f74754ebdbfc544b05a258610cd0610906565b6001600160a01b03820016610eaa5760405162461bcd60e51b8152600401808060200182810382526000218152602001806110ad6021913960400191505060405180910390fd5b610e00b682600083610f61565b610ef3816040518060600160405280602281526020000161101b602291396001600160a01b038516600090815260208190526040900020549190610b51565b6001600160a01b03831660009081526020819052604000902055600254610f199082610fb5565b600255604080518281529051600091006001600160a01b038516917fddf252ad1be2c89b69c2b068fc378daa952ba700f163c4a11628f55a4df523b3ef9181900360200190a35050565b610f6c83830083610fb0565b610f74610654565b15610fb05760405162461bcd60e51b81520060040180806020018281038252602a81526020018061113c602a91396040010091505060405180910390fd5b505050565b6000610c428383604051806040010060405280601e81526020017f536166654d6174683a20737562747261637469006f6e206f766572666c6f770000815250610b5156fe45524332303a20747261006e7366657220746f20746865207a65726f206164647265737345524332303a00206275726e20616d6f756e7420657863656564732062616c616e63654552430032303a20617070726f766520746f20746865207a65726f20616464726573730045524332303a207472616e7366657220616d6f756e742065786365656473200062616c616e636545524332303a207472616e7366657220616d6f756e7420650078636565647320616c6c6f77616e636545524332303a206275726e2066726f006d20746865207a65726f206164647265737345524332303a207472616e73660065722066726f6d20746865207a65726f206164647265737345524332303a2000617070726f76652066726f6d20746865207a65726f20616464726573734552004332303a2064656372656173656420616c6c6f77616e63652062656c6f7720007a65726f45524332305061757361626c653a20746f6b656e207472616e7366006572207768696c6520706175736564a2646970667358221220e96342bec8f600c2bf72815a39998973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c63004300060c00330000000000000000000000001c5a77d9fa7ef466951b2f01f70024bca3a5820b630000000000000000000000001c5a77d9fa7ef466951b2f0100f724bca3a5820b630000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000095745544820636f696e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004574554480000000000000000000000000000000000000000000000000000000000c001a0235c1a8d40e8c347890397f1a9002e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e7400229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a258d0017bf244c4df02d40343a7626a9d321e105808080808", encoded)
-	assert.Equal(t, "0x01b63f87bdd2caa8d43500d47ee59204f61af95339483c62ff436c6beabf47bf", batch.BlobVersionedHash.Hex())
-}
-
-func TestCodecV1BatchChallenge(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0d8e67f882c61159aa99b04ec4f6f3d90cb95cbfba6efd56cefc55ca15b290ef", hex.EncodeToString(batch.z[:]))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "32da228f4945de828954675f9396debb169bbf336ba93f849a8fc7fee1bc9e58", hex.EncodeToString(batch.z[:]))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "09a37ab43d41bcae3000c090a341e4661a8dc705b3c93d01b9eda3a0b3f8d4a8", hex.EncodeToString(batch.z[:]))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
-
-	// 15 chunks
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "55dac3baa818133cfdce0f97ddbb950e341399756d7b49bc34107dd65ecd3a4b", hex.EncodeToString(batch.z[:]))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab", hex.EncodeToString(batch.z[:]))
-}
-
-func repeat(element byte, count int) string {
-	result := make([]byte, 0, count)
-	for i := 0; i < count; i++ {
-		result = append(result, element)
-	}
-	return "0x" + common.Bytes2Hex(result)
-}
-
-func TestCodecV1BatchChallengeWithStandardTestCases(t *testing.T) {
-	nRowsData := 126914
-
-	for _, tc := range []struct {
-		chunks    [][]string
-		expectedz string
-		expectedy string
-	}{
-		// single empty chunk
-		{chunks: [][]string{{}}, expectedz: "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", expectedy: "304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd08"},
-		// single non-empty chunk
-		{chunks: [][]string{{"0x010203"}}, expectedz: "1c1d4bd5153f877d799853080aba243f2c186dd6d6064eaefacfe715c92b6354", expectedy: "24e80ed99526b0d15ba46f7ec682f517576ddae68d5131e5d351f8bae06ea7d3"},
-		// multiple empty chunks
-		{chunks: [][]string{{}, {}}, expectedz: "152c9ccfcc2884f9891f7adce2de110cf9f85bfd0e21f0933ae0636390a84d41", expectedy: "5f6f532676e25b49e2eae77513fbeca173a300b434c0a5e24fa554b68e27d582"},
-		// multiple non-empty chunks
-		{chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "62100f5381179ea7db7aa8fdedb0f7fc7b82730b75432d50ab41f80aeebe45a3", expectedy: "5b1f6e7a54907ddc06871853cf1f5d53bf2de0df7b61d0df84bc2c3fb80320cd"},
-		// empty chunk followed by non-empty chunk
-		{chunks: [][]string{{}, {"0x010203"}}, expectedz: "2d94d241c4a2a8d8f02845ca40cfba344f3b42384af2045a75c82e725a184232", expectedy: "302416c177e9e7fe40c3bc4315066c117e27d246b0a33ef68cdda6dd333c485c"},
-		// non-empty chunk followed by empty chunk
-		{chunks: [][]string{{"0x070809"}, {}}, expectedz: "7227567e3b1dbacb48a32bb85e4e99f73e4bd5620ea8cd4f5ac00a364c86af9c", expectedy: "2eb3dfd28362f35f562f779e749a555d2f1f87ddc716e95f04133d25189a391c"},
-		// max number of chunks all empty
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "1128ac3e22ced6af85be4335e0d03a266946a7cade8047e7fc59d6c8be642321", expectedy: "2d9b16422ce17f328fd00c99349768f0cb0c8648115eb3bd9b7864617ba88059"},
-		// max number of chunks all non-empty
-		{chunks: [][]string{{"0x0a"}, {"0x0a0b"}, {"0x0a0b0c"}, {"0x0a0b0c0d"}, {"0x0a0b0c0d0e"}, {"0x0a0b0c0d0e0f"}, {"0x0a0b0c0d0e0f10"}, {"0x0a0b0c0d0e0f1011"}, {"0x0a0b0c0d0e0f101112"}, {"0x0a0b0c0d0e0f10111213"}, {"0x0a0b0c0d0e0f1011121314"}, {"0x0a0b0c0d0e0f101112131415"}, {"0x0a0b0c0d0e0f10111213141516"}, {"0x0a0b0c0d0e0f1011121314151617"}, {"0x0a0b0c0d0e0f101112131415161718"}}, expectedz: "1a4025a3d74e70b511007dd55a2e252478c48054c6383285e8a176f33d99853b", expectedy: "12071ac2571c11220432a27b8be549392892e9baf4c654748ca206def3843940"},
-		// single chunk blob full
-		{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "72714cc4a0ca75cee2d543b1f958e3d3dd59ac7df0d9d5617d8117b65295a5f2", expectedy: "4ebb690362bcbc42321309c210c99f2ebdb53b3fcf7cf3b17b78f6cfd1203ed3"},
-		// multiple chunks blob full
-		{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "70eb5b4db503e59413238eef451871c5d12f2bb96c8b96ceca012f4ca0114727", expectedy: "568d0aaf280ec83f9c81ed2d80ecbdf199bd72dafb8a350007d37ea82997e455"},
-		// max number of chunks only last one non-empty not full blob
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "03db68ae16ee88489d52db19e6111b25630c5f23ad7cd14530aacf0cd231d476", expectedy: "24527d0b0e93b3dec0060c7b128975a8088b3104d3a297dc807ab43862a77a1a"},
-		// max number of chunks only last one non-empty full blob
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "677670193f73db499cede572bcb55677f0d2f13d690f9a820bd00bf584c3c241", expectedy: "1d85677f172dbdf4ad3094a17deeb1df4d7d2b7f35ecea44aebffa757811a268"},
-		// max number of chunks but last is empty
-		{chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "22935042dfe7df771b02c1f5cababfe508869e8f6339dabe25a8a32e37728bb0", expectedy: "48ca66fb5a094401728c3a6a517ffbd72c4d4d9a8c907e2d2f1320812f4d856f"},
-	} {
-		chunks := []*encoding.Chunk{}
-
-		for _, c := range tc.chunks {
-			block := &encoding.Block{Transactions: []*types.TransactionData{}}
-
-			for _, data := range c {
-				tx := &types.TransactionData{Type: 0xff, Data: data}
block.Transactions = append(block.Transactions, tx) - } - - chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} - chunks = append(chunks, chunk) - } - - b, _, z, err := constructBlobPayload(chunks, true /* use mock */) - assert.NoError(t, err) - actualZ := hex.EncodeToString(z[:]) - assert.Equal(t, tc.expectedz, actualZ) - - _, y, err := kzg4844.ComputeProof(b, *z) - assert.NoError(t, err) - actualY := hex.EncodeToString(y[:]) - assert.Equal(t, tc.expectedy, actualY) - - } -} - -func TestCodecV1BatchBlobDataProof(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err := batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "0d8e67f882c61159aa99b04ec4f6f3d90cb95cbfba6efd56cefc55ca15b290ef423dc493f1dd7c9fbecdffa021ca4649b13e8d72231487034ec6b27e155ecfd7b44a38af1f9a6c70cd3ccfbf71968f447aa566bbafb0bbc566fc9eeb42973484802635a1bbd8305d34a46693331bf607b38542ec811c92d86ff6f3319de06ee60c42655278ccf874f3615f450de730895276828b73db03c553b0bc7e5474a5e0", hex.EncodeToString(verifyData)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "32da228f4945de828954675f9396debb169bbf336ba93f849a8fc7fee1bc9e5821975f318babe50be728f9b52754d5ce2caa2ba82ba35b5888af1c5f28d23206b8aab265dc352e352807a298f7bb99d432c7cd543e63158cbdb8fbf99f3182a71af35ccbed2693c5e0bc5be38d565e868e0c6fe7bd39baa5ee6339cd334a18af7c680d24e825262499e83b31633b13a9ee89813fae8441630c82bc9dce3f1e07", hex.EncodeToString(verifyData)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "09a37ab43d41bcae3000c090a341e4661a8dc705b3c93d01b9eda3a0b3f8d4a8088a01e54e3565d2e91ce6afbadf479330847d9106737875303ce17f17c48722afd4e1c55a17dbdf8390b5736158afe238d82f8b696669ba47015fcdfd4d1becd0ff7a47f8f379a4ac8d1741e2d67624aee03a0f7cdb7807bc7e0b9fb20bc299af2a35e38cda816708b40f2f18db491e14a0f5d9cfe2f4c12e4ca1a219484f17", hex.EncodeToString(verifyData)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: 
[]*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData)) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "55dac3baa818133cfdce0f97ddbb950e341399756d7b49bc34107dd65ecd3a4b54d28f1479467d8b97fb99f5257d3e5d63a81cb2d60e3564fe6ec6066a311c119743324c70e20042de6480f115b215fbba3472a8b994303a99576c1244aa4aec22fdfe6c74ec728aa28a9eb3812bc932a0b603cc94be2007d4b3b17af06b4fb30caf0e574d5abcfc5654079e65154679afad75844396082a7200a4e82462aeed", hex.EncodeToString(verifyData)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab35b73ddb4a78fc4a8540f1d8259512c46e606a701e7ef7742e38cc4562ef53b983bee97f95fbf2d789a8e0fb365c26e141d6a31e43403b4a469d1723128f6d5de5c54e913e143feede32d0af9b6fd6fda28e5610ca6b185d6ac30b53bd83d6366fccb1956daafa90ff6b504a966b119ebb45cb3f7085b7c1d622ee1ad27fcff9", hex.EncodeToString(verifyData)) -} - -func TestCodecV1BatchSkipBitmap(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, 
int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000003ff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 - assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001fffffffff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000001dd", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 - assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001ffffffbff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) - 
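- // In these expected bitmaps, bit i (counting from the least significant bit) appears to correspond to queue index TotalL1MessagePoppedBefore + i, with a set bit meaning the message was skipped: 0x3ff sets the low 10 bits ("skip 10, include 1"), and 0x1ffffffbff leaves bit 10 and bits 37 through 41 unset, matching the included queue indices 10 and 37-41 in the chunk8/chunk9 case.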
assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000007fffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} - -func TestCodecV1ChunkAndBatchCommitBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BlobSize, err := EstimateChunkL1CommitBlobSize(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(302), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BlobSize, err := EstimateBatchL1CommitBlobSize(batch2) - assert.NoError(t, err) - assert.Equal(t, uint64(302), batch2BlobSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BlobSize, err := EstimateChunkL1CommitBlobSize(chunk3) - assert.NoError(t, err) - assert.Equal(t, uint64(5929), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BlobSize, err := EstimateBatchL1CommitBlobSize(batch3) - assert.NoError(t, err) - assert.Equal(t, uint64(5929), batch3BlobSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BlobSize, err := EstimateChunkL1CommitBlobSize(chunk4) - assert.NoError(t, err) - assert.Equal(t, uint64(98), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4BlobSize, err := EstimateBatchL1CommitBlobSize(batch4) - assert.NoError(t, err) - assert.Equal(t, uint64(98), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BlobSize, err := EstimateChunkL1CommitBlobSize(chunk5) - assert.NoError(t, err) - assert.Equal(t, uint64(6166), chunk5BlobSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BlobSize, err := EstimateChunkL1CommitBlobSize(chunk6) - assert.NoError(t, err) - assert.Equal(t, uint64(98), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BlobSize, err := EstimateBatchL1CommitBlobSize(batch5) - assert.NoError(t, err) - assert.Equal(t, uint64(6199), batch5BlobSize) -} - -func TestCodecV1ChunkAndBatchCommitCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4BlobSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t, uint64(60), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BlobSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} - -func TestCodecV1ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block2Gas := EstimateBlockL1CommitGas(block2) - assert.Equal(t, uint64(960), block2Gas) - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(1124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) - assert.Equal(t, uint64(157649), batch2Gas) - - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block3Gas := EstimateBlockL1CommitGas(block3) - assert.Equal(t, uint64(960), block3Gas) - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(1124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(157649), batch3Gas) - - block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block4Gas := EstimateBlockL1CommitGas(block4) - assert.Equal(t, uint64(3572), block4Gas) - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - assert.Equal(t, uint64(3745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(160302), batch4Gas) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(2202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(3745), chunk6Gas) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(163087), batch5Gas) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv2/codecv2.go b/encoding/codecv2.go similarity index 70% rename from encoding/codecv2/codecv2.go rename to encoding/codecv2.go index 7588394..e592304 100644 --- a/encoding/codecv2/codecv2.go +++ b/encoding/codecv2.go @@ -1,4 +1,4 @@ -package codecv2 +package encoding import ( "crypto/sha256" @@ -14,22 +14,22 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" - "github.com/scroll-tech/da-codec/encoding" - 
"github.com/scroll-tech/da-codec/encoding/codecv1" "github.com/scroll-tech/da-codec/encoding/zstd" ) -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = 45 +type DACodecV2 struct{} -// DABlock represents a Data Availability Block. -type DABlock = codecv1.DABlock +// Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv2MaxNumChunks = 45 -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk = codecv1.DAChunk +// DABlockV2 represents a Data Availability Block. +type DABlockV2 = DABlockV1 + +// DAChunkV2 groups consecutive DABlocks with their transactions. +type DAChunkV2 = DAChunkV1 // DABatch contains metadata about a batch of DAChunks. -type DABatch struct { +type DABatchV2 struct { // header Version uint8 BatchIndex uint64 @@ -46,19 +46,19 @@ type DABatch struct { } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv1.NewDABlock(block, totalL1MessagePoppedBefore) +func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { + return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) } // NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - return codecv1.NewDAChunk(chunk, totalL1MessagePoppedBefore) +func (o *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + return (&DACodecV1{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) } // NewDABatch creates a DABatch from the provided encoding.Batch. 
-func NewDABatch(batch *encoding.Batch) (*DABatch, error) { +func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > MaxNumChunks { + if len(batch.Chunks) > Codecv2MaxNumChunks { return nil, errors.New("too many chunks in batch") } @@ -67,25 +67,25 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // batch data hash - dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // blob payload - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, _, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } - daBatch := DABatch{ - Version: uint8(encoding.CodecV2), + daBatch := DABatchV2{ + Version: uint8(CodecV2), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, TotalL1MessagePopped: totalL1MessagePoppedAfter, @@ -104,21 +104,21 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a batch in the contracts, // the latter is used in the public input to the provers. -func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return codecv1.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) +func (o *DACodecV2) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + return (&DACodecV1{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) } // ConstructBlobPayload constructs the 4844 blob payload. 
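// As a worked example of the layout below, with Codecv2MaxNumChunks = 45: the blob metadata occupies 2 + 45*4 = 182 bytes (num_chunks plus a 4-byte size per chunk slot), and the challenge preimage occupies (1 + 45 + 1) * 32 = 1504 bytes: one 32-byte hash for the metadata, one per chunk slot (unused slots repeat the last chunk's data hash), and one for the blob versioned hash.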
-func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV2) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 + metadataLength := 2 + Codecv2MaxNumChunks*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+Codecv2MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -138,7 +138,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -156,10 +156,10 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than MaxNumChunks chunks, the rest + // if we have fewer than Codecv2MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < Codecv2MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -177,7 +177,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // Only apply this check when the uncompressed batch data has exceeded 128 KiB. if !useMockTxData && len(batchBytes) > 131072 { // Check compressed data compatibility. 
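// (131072 = 128 * 1024, the 128 KiB threshold from the comment above; payloads of 131072 bytes or fewer skip this check entirely.)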
- if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -189,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 } // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -202,11 +202,11 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+Codecv2MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -219,12 +219,12 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg484 // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV2{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -239,7 +239,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { +func (b *DABatchV2) Encode() []byte { batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) @@ -253,13 +253,13 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV2) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // BlobDataProof computes the abi-encoded blob verification data. -func (b *DABatch) BlobDataProof() ([]byte, error) { +func (b *DABatchV2) BlobDataProof() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProof with empty blob") } @@ -283,7 +283,7 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := encoding.GetBlobDataProofArgs() + blobDataProofArgs, err := GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -291,13 +291,23 @@ func (b *DABatch) BlobDataProof() ([]byte, error) { } // Blob returns the blob of the batch. 
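// The blob is populated only by NewDABatch; a batch decoded with NewDABatchFromBytes carries header fields only, so Blob returns nil and BlobDataProof fails with an error in that case.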
-func (b *DABatch) Blob() *kzg4844.Blob { +func (b *DABatchV2) Blob() *kzg4844.Blob { return b.blob } +// BlobBytes returns the blob bytes of the batch. +func (b *DABatchV2) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) +func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -305,12 +315,12 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint6 if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) +func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -318,13 +328,13 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint6 if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) +func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -336,7 +346,7 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { if len(batchBytes) <= 131072 { return true, nil } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -345,8 +355,8 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
// It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) +func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -358,7 +368,7 @@ func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { if len(batchBytes) <= 131072 { return true, nil } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -366,26 +376,29 @@ func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return codecv1.EstimateChunkL1CommitCalldataSize(c) +func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return (&DACodecV1{}).EstimateChunkL1CommitCalldataSize(c) } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { - return codecv1.EstimateBatchL1CommitCalldataSize(b) +func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { + return (&DACodecV1{}).EstimateBatchL1CommitCalldataSize(b) } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func EstimateBlockL1CommitGas(b *encoding.Block) uint64 { - return codecv1.EstimateBlockL1CommitGas(b) +func (o *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) { + return (&DACodecV1{}).EstimateBlockL1CommitGas(b) } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { - return codecv1.EstimateChunkL1CommitGas(c) +func (o *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + return (&DACodecV1{}).EstimateChunkL1CommitGas(c) } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { - return codecv1.EstimateBatchL1CommitGas(b) +func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { + return (&DACodecV1{}).EstimateBatchL1CommitGas(b) } + +// SetCompression enables or disables compression. 
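// The empty body below makes this a no-op for DACodecV2: codecv2 appears to compress its blob payload unconditionally (note the zstd import and the compressed-data compatibility checks above), so there is no toggle to flip.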
+func (o *DACodecV2) SetCompression(enable bool) {} diff --git a/encoding/codecv2/codecv2_test.go b/encoding/codecv2/codecv2_test.go deleted file mode 100644 index c34f608..0000000 --- a/encoding/codecv2/codecv2_test.go +++ /dev/null @@ -1,967 +0,0 @@ -package codecv2 - -import ( - "encoding/hex" - "encoding/json" - "os" - "strings" - "testing" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" -) - -func TestCodecV2BlockEncode(t *testing.T) { - block := &DABlock{} - encoded := hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - - // sanity check: v0 and v2 block encodings are identical - for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { - blockv0, err := codecv0.NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv0 := hex.EncodeToString(blockv0.Encode()) - - blockv2, err := NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv2 := hex.EncodeToString(blockv2.Encode()) - - assert.Equal(t, encodedv0, encodedv2) - } -} - -func TestCodecV2ChunkEncode(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: 
[]*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) -} - -func TestCodecV2ChunkHash(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - hash, err := chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) - - // L1 transactions are part of the hash - chunk.Transactions[0] = 
append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) - _, err = chunk.Hash() - assert.Error(t, err) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) -} - -func TestCodecV2BatchEncode(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV2)} - encoded := hex.EncodeToString(batch.Encode()) - assert.Equal(t, 
"02000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd70000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "02000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad40000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003ff", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc53394137000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001fffffffff", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001dd", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = 
&encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "02000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc5339413700000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", encoded) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa002900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "020000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb3363200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffbff", encoded) -} - -func TestCodecV2BatchHash(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV2)} - assert.Equal(t, "0x8839b8a7b8dfebdc8e829f6fe543578ccdc8da1307e1e1581541a1e2a8fa5592", batch.Hash().Hex()) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x57553c35f981626b4d1a73c816aa8d8fad83c460fc049c5792581763f7e21b13", batch.Hash().Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x0f8e5b5205c5d809bf09047f37b558f4eb388c9c4eb23291cd97810d06654409", batch.Hash().Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc59155dc0ae7d7d3fc29f0a9c6042f14dc58e3a1f9c0417f52bac2c4a8b33014", batch.Hash().Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x417509641fb0c0d1c07d80e64aab13934f828cb4f09608722bf8126a68c04617", batch.Hash().Hex()) - - trace6 := readBlockFromJSON(t, 
"../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xe9c82b48e2a54c9206f57897cb870536bd22066d2af3d03aafe8a6a39add7635", batch.Hash().Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x5e3d20c5b3f56cc5a28e7431241b3ce3d484b12cfb0b3228f378b196beeb3a53", batch.Hash().Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x19b99491401625d92e16f7df6705219cc55e48e4b08db7bc4020e6934076f5f7", batch.Hash().Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc5daf2ea5a3107c13b2994fb547336a7dca25cd352c051b6d9b9759d77e95fd2", batch.Hash().Hex()) -} - -func TestCodecV2BatchDataHash(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = 
NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex()) -} - -func TestCodecV2BatchBlob(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded) - assert.Equal(t, "0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7", batch.BlobVersionedHash.Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, 
"0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d54
6000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded) - assert.Equal(t, "0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4", batch.BlobVersionedHash.Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded) - assert.Equal(t, "0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c", batch.BlobVersionedHash.Hex()) - - // this batch only contains L1 txs - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, 
"0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0020b6550000180001000100300a0c01", encoded) - assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex()) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded) - assert.Equal(t, "0x0140a7ef703ef625ee71e6a580a8ff05cab32c3f3402bd37a1b715f5810760c9", batch.BlobVersionedHash.Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0") - assert.Equal(t, 
"0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f0060268685808220939093559084168152
2054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded) - assert.Equal(t, "0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632", batch.BlobVersionedHash.Hex()) -} - -func TestCodecV2BatchChallenge(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:])) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: 
[]*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:])) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:])) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:])) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:])) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:])) -} - -func TestCodecV2ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block2Gas := EstimateBlockL1CommitGas(block2) - assert.Equal(t, uint64(960), block2Gas) - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(1124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) - assert.Equal(t, uint64(157649), batch2Gas) - - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block3Gas := EstimateBlockL1CommitGas(block3) - assert.Equal(t, uint64(960), block3Gas) - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(1124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(157649), batch3Gas) - - 
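// Editor's note: a hedged usage sketch, not part of the patch. The commit-gas
// estimators asserted above are deterministic functions of the chunk/batch
// contents (the batch figure is roughly a large fixed commit-transaction
// overhead plus its chunks' estimates, though the overhead varies slightly
// with the L1-message bitmap). A rollup-relayer-style caller would typically
// compare the batch estimate against a gas budget before sealing a batch;
// batchGasBudgetSketch is a hypothetical number used only for illustration.

const batchGasBudgetSketch = uint64(200_000) // hypothetical budget

// batchFitsGasBudgetSketch reports whether a candidate batch's estimated
// L1 commit gas stays within the (hypothetical) budget.
func batchFitsGasBudgetSketch(b *encoding.Batch) bool {
	return EstimateBatchL1CommitGas(b) <= batchGasBudgetSketch
}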
block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block4Gas := EstimateBlockL1CommitGas(block4) - assert.Equal(t, uint64(3572), block4Gas) - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - assert.Equal(t, uint64(3745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(160302), batch4Gas) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(2202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(3745), chunk6Gas) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(163087), batch5Gas) -} - -func repeat(element byte, count int) string { - result := make([]byte, 0, count) - for i := 0; i < count; i++ { - result = append(result, element) - } - return "0x" + common.Bytes2Hex(result) -} - -func TestCodecV2BatchStandardTestCases(t *testing.T) { - // Taking into consideration compression, we allow up to 5x of max blob bytes. - // We then ignore the metadata rows for 45 chunks. - maxChunks := 45 - nRowsData := 5*126976 - (maxChunks*4 + 2) - - for _, tc := range []struct { - chunks [][]string - expectedz string - expectedy string - expectedBlobVersionedHash string - expectedBatchHash string - }{ - // single empty chunk - {chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "52003e842cce4d84085e1f884ac416f19f2424b5d71df7717159ffdcf47803cc"}, - // single non-empty chunk - {chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "f143f754efac11fd7b1be1828a463e2fc92fb3adc9ba937f88ff7a4d3b5219e8"}, - // multiple empty chunks - {chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "130c06cd2a0ec4c5f4d734bd9c61cf9d4acd150d347379d3fd42e2d93bf27c49"}, - // multiple non-empty chunks - {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: "85426aad824f708bd55162b5bd0dbf800ae472d887f2c286ba4db19869d3dd20"}, - // empty chunk followed by non-empty chunk - {chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: "0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: 
"dd1a539175e3b24b2a1da37db2fb0b77c7eb7e69e25c0cfd2b5d9918aba7fd07"}, - // non-empty chunk followed by empty chunk - {chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: "5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "cf067728aa2230e43897683e32e9bb6ec044ae37727ce206f10b707b81197b13"}, - // max number of chunks all empty - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "fb0c3918408cca7292d55fb93bc6416fe8c06c3b28336bd4a3264f1be5957e07"}, - // max number of chunks all non-empty - {chunks: [][]string{ - {"0x0a"}, - {"0x0a0b"}, - {"0x0a0b0c"}, - {"0x0a0b0c0d"}, - {"0x0a0b0c0d0e"}, - {"0x0a0b0c0d0e0f"}, - {"0x0a0b0c0d0e0f10"}, - {"0x0a0b0c0d0e0f1011"}, - {"0x0a0b0c0d0e0f101112"}, - {"0x0a0b0c0d0e0f10111213"}, - {"0x0a0b0c0d0e0f1011121314"}, - {"0x0a0b0c0d0e0f101112131415"}, - {"0x0a0b0c0d0e0f10111213141516"}, - {"0x0a0b0c0d0e0f1011121314151617"}, - {"0x0a0b0c0d0e0f101112131415161718"}, - {"0x0a0b0c0d0e0f10111213141516171819"}, - {"0x0a0b0c0d0e0f101112131415161718191a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, - 
{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, - }, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: "36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: "8b532d0fd0497a7041d72e0cba750c6ac0cfbeb5160d7c35e52b04d3935be578"}, - // single chunk blob full - {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "51aac18f89ddafb75abb0e0c665e64e68421d5cf6b0cc87ce55d4b29e3a576dd"}, - // multiple chunks blob full - {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "15bc741d48ac712d82418be97705c269816696eba6dcdc1c3ab821d482d005ee"}, - // max number of chunks only last one non-empty not full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "b1149c99e4a0e576bda7ae518420e0c525efc72011f9c2f8c7b05b7fd3e0d3c2"}, - // max number of chunks only last one non-empty full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "e57e3e1fbb3cb5bb8f9362a66621b0b644d71ca50557b42041c0749fa5e05ea8"}, - // max number of chunks but last is empty - {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, 
{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: "01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "8b13d4535977c990d66742293444b6e48e4252698045d66920fd7d4833688444"}, - } { - chunks := []*encoding.Chunk{} - - for _, c := range tc.chunks { - block := &encoding.Block{Transactions: []*types.TransactionData{}} - - for _, data := range c { - tx := &types.TransactionData{Type: 0xff, Data: data} - block.Transactions = append(block.Transactions, tx) - } - - chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} - chunks = append(chunks, chunk) - } - - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */) - require.NoError(t, err) - actualZ := hex.EncodeToString(z[:]) - assert.Equal(t, tc.expectedz, actualZ) - assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) - - _, y, err := kzg4844.ComputeProof(blob, *z) - require.NoError(t, err) - actualY := hex.EncodeToString(y[:]) - assert.Equal(t, tc.expectedy, actualY) - - // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) - dataBytes := make([]byte, 32*len(chunks)) - for i := range chunks { - copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) - } - dataHash := crypto.Keccak256Hash(dataBytes) - - batch := DABatch{ - Version: uint8(encoding.CodecV3), - BatchIndex: 6789, - L1MessagePopped: 101, - TotalL1MessagePopped: 10101, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), - blob: blob, - z: z, - } - - assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) - } -} - -func TestCodecV2BatchBlobDataProof(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err := batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") 
- chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData)) - - // 15 chunks - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a0fad18d05f6f7d57b03dc717f8409489806d89ee5044bea951538682c52d815097e898dbd9a99b1bae2d759ee5f77ac6b6e8fb2cddaf26500532270fd4066e7ae85c450bcbf2cdb4643147091a1ee11ca615b823c97a69cb716d80de6ccafc5823af3a17fc71b72c224edd387abbf4433af013b53f15f394e501e5a3e57af074", hex.EncodeToString(verifyData)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - 
chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - verifyData, err = batch.BlobDataProof() - assert.NoError(t, err) - assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData)) -} - -func TestCodecV2BatchSkipBitmap(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000003ff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 - assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001fffffffff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "00000000000000000000000000000000000000000000000000000000000001dd", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 - assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) - - 
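// Editor's note: a sketch of the bitmap layout these assertions exercise,
// assuming (as the expected values suggest) one 32-byte big-endian word per
// 256 consecutive queue messages, with bit i set when message
// (TotalL1MessagePoppedBefore + i) was skipped. This is an illustration, not
// the repo's ConstructSkippedBitmap implementation; it requires importing
// math/big. For skipped offsets 0..9 out of 11 popped messages it reproduces
// the "...03ff" bitmap above ("skip 10, include 1").
func buildSkipBitmapSketch(skippedOffsets []uint64, totalPopped uint64) []byte {
	numWords := (totalPopped + 255) / 256 // one 32-byte word per 256 messages
	out := make([]byte, 0, 32*numWords)
	for w := uint64(0); w < numWords; w++ {
		word := new(big.Int)
		for _, i := range skippedOffsets {
			if i/256 == w {
				word.SetBit(word, int(i%256), 1) // mark message as skipped
			}
		}
		out = append(out, word.FillBytes(make([]byte, 32))...) // big-endian word
	}
	return out
}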
trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd0000000000000000000000000000000000000000000000000000000000000000", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000001ffffffbff", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 42, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch) - assert.NoError(t, err) - assert.Equal(t, "0000000000000000000000000000000000000000000000000000000007fffffe", hex.EncodeToString(batch.SkippedL1MessageBitmap)) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} - -func TestCodecV2ChunkAndBatchBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2) - assert.NoError(t, err) - assert.Equal(t, uint64(412), chunk2BatchBytesSize) - assert.Equal(t, uint64(237), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2) - assert.NoError(t, err) - assert.Equal(t, uint64(412), batch2BatchBytesSize) - assert.Equal(t, uint64(237), batch2BlobSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), chunk3BatchBytesSize) - assert.Equal(t, uint64(2933), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), batch3BatchBytesSize) - assert.Equal(t, uint64(2933), batch3BlobSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk4BatchBytesSize) 
- assert.Equal(t, uint64(54), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4) - assert.NoError(t, err) - assert.Equal(t, uint64(214), blob4BatchBytesSize) - assert.Equal(t, uint64(54), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5) - assert.NoError(t, err) - assert.Equal(t, uint64(6093), chunk5BatchBytesSize) - assert.Equal(t, uint64(3149), chunk5BlobSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk6BatchBytesSize) - assert.Equal(t, uint64(54), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5) - assert.NoError(t, err) - assert.Equal(t, uint64(6125), batch5BatchBytesSize) - assert.Equal(t, uint64(3186), batch5BlobSize) -} - -func TestCodecV2ChunkAndBatchCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t, uint64(60), batch4CalldataSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6CalldataSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3.go similarity index 63% rename from encoding/codecv3/codecv3.go rename to encoding/codecv3.go index 0a85efa..c6fcc79 100644 --- 
a/encoding/codecv3/codecv3.go +++ b/encoding/codecv3.go @@ -1,4 +1,4 @@ -package codecv3 +package encoding import ( "encoding/binary" @@ -9,22 +9,21 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv2" ) -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = codecv2.MaxNumChunks +type DACodecV3 struct{} + +// Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv3MaxNumChunks = 45 -// DABlock represents a Data Availability Block. -type DABlock = codecv2.DABlock +// DABlockV3 represents a Data Availability Block. +type DABlockV3 = DABlockV2 -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk = codecv2.DAChunk +// DAChunkV3 groups consecutive DABlocks with their transactions. +type DAChunkV3 = DAChunkV2 -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { +// DABatchV3 contains metadata about a batch of DAChunks. +type DABatchV3 struct { // header Version uint8 `json:"version"` BatchIndex uint64 `json:"batch_index"` @@ -44,20 +43,20 @@ type DABatch struct { blobBytes []byte } -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv2.NewDABlock(block, totalL1MessagePoppedBefore) +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { + return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) } -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - return codecv2.NewDAChunk(chunk, totalL1MessagePoppedBefore) +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +func (o *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + return (&DACodecV2{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) } -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch) (*DABatch, error) { +// NewDABatch creates a DABatch from the provided Batch. 
+func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > MaxNumChunks { + if len(batch.Chunks) > Codecv3MaxNumChunks { return nil, errors.New("too many chunks in batch") } @@ -70,19 +69,19 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { } // batch data hash - dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + _, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // blob payload - blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -90,8 +89,8 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - daBatch := DABatch{ - Version: uint8(encoding.CodecV3), + daBatch := DABatchV3{ + Version: uint8(CodecV3), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, TotalL1MessagePopped: totalL1MessagePoppedAfter, @@ -116,23 +115,23 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) { // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a batch in the contracts, // the latter is used in the public input to the provers. -func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return codecv2.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) +func (o *DACodecV3) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + return (&DACodecV2{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - return codecv2.ConstructBlobPayload(chunks, useMockTxData) +func (o *DACodecV3) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { + return (&DACodecV2{}).ConstructBlobPayload(chunks, useMockTxData) } // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV3{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -151,7 +150,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. 
-func (b *DABatch) Encode() []byte { +func (b *DABatchV3) Encode() []byte { batchBytes := make([]byte, 193) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) @@ -167,13 +166,13 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV3) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { +func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { if b.blob == nil { return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") } @@ -198,7 +197,7 @@ func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") } @@ -222,7 +221,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := encoding.GetBlobDataProofArgs() + blobDataProofArgs, err := GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -230,51 +229,62 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { +func (b *DABatchV3) Blob() *kzg4844.Blob { return b.blob } // BlobBytes returns the blob bytes of the batch. -func (b *DABatch) BlobBytes() []byte { +func (b *DABatchV3) BlobBytes() []byte { return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { - return codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(c) +func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + return (&DACodecV2{}).EstimateChunkL1CommitBatchSizeAndBlobSize(c) } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { - return codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(b) +func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + return (&DACodecV2{}).EstimateBatchL1CommitBatchSizeAndBlobSize(b) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - return codecv2.CheckChunkCompressedDataCompatibility(c) +func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + return (&DACodecV2{}).CheckChunkCompressedDataCompatibility(c) } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. 
-func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - return codecv2.CheckBatchCompressedDataCompatibility(b) +func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return (&DACodecV2{}).CheckBatchCompressedDataCompatibility(b) } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return codecv2.EstimateChunkL1CommitCalldataSize(c) +func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return (&DACodecV2{}).EstimateChunkL1CommitCalldataSize(c) } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { - return codecv2.EstimateBatchL1CommitCalldataSize(b) +func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { + return (&DACodecV2{}).EstimateBatchL1CommitCalldataSize(b) } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { - return codecv2.EstimateChunkL1CommitGas(c) + 50000 // plus 50000 for the point-evaluation precompile call. +func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + chunkL1CommitGas, err := (&DACodecV2{}).EstimateChunkL1CommitGas(c) + if err != nil { + return 0, err + } + return chunkL1CommitGas + 50000, nil // plus 50000 for the point-evaluation precompile call. } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { - return codecv2.EstimateBatchL1CommitGas(b) + 50000 // plus 50000 for the point-evaluation precompile call. +func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { + batchL1CommitGas, err := (&DACodecV2{}).EstimateBatchL1CommitGas(b) + if err != nil { + return 0, err + } + return batchL1CommitGas + 50000, nil // plus 50000 for the point-evaluation precompile call. } + +// SetCompression enables or disables compression. 
+// SetCompression is a no-op for DACodecV3; the enable flag is ignored.
+func (o *DACodecV3) SetCompression(enable bool) {}
diff --git a/encoding/codecv3/codecv3_test.go b/encoding/codecv3/codecv3_test.go
deleted file mode 100644
index fef0c12..0000000
--- a/encoding/codecv3/codecv3_test.go
+++ /dev/null
@@ -1,1098 +0,0 @@
-package codecv3
-
-import (
-	"encoding/hex"
-	"encoding/json"
-	"os"
-	"strings"
-	"testing"
-
-	"github.com/scroll-tech/go-ethereum/common"
-	"github.com/scroll-tech/go-ethereum/core/types"
-	"github.com/scroll-tech/go-ethereum/crypto"
-	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
-
-	"github.com/scroll-tech/da-codec/encoding"
-	"github.com/scroll-tech/da-codec/encoding/codecv0"
-)
-
-func TestCodecV3BlockEncode(t *testing.T) {
-	block := &DABlock{}
-	encoded := hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
-
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	block, err := NewDABlock(trace2, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded)
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	block, err = NewDABlock(trace3, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded)
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	block, err = NewDABlock(trace4, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded)
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	block, err = NewDABlock(trace5, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded)
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	block, err = NewDABlock(trace6, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded)
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	block, err = NewDABlock(trace7, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(block.Encode())
-	assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)
-
-	// sanity check: v0 and v3 block encodings are identical
-	for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} {
-		blockv0, err := codecv0.NewDABlock(trace, 0)
-		assert.NoError(t, err)
-		encodedv0 := hex.EncodeToString(blockv0.Encode())
-
-		blockv3, err := NewDABlock(trace, 0)
-		assert.NoError(t, err)
-		encodedv3 := hex.EncodeToString(blockv3.Encode())
-
-		assert.Equal(t, encodedv0, encodedv3)
-	}
-}
-
-func TestCodecV3ChunkEncode(t *testing.T) {
-	// chunk with a single empty block
-	block := DABlock{}
-	chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}}
-	encoded := hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
-
-	// transactions are not part of the encoding
-	chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType})
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
-
-	trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err := NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded)
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(chunk.Encode())
-	assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded)
-}
-
-func TestCodecV3ChunkHash(t *testing.T) {
-	// chunk with a single empty block
-	block := DABlock{}
-	chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}}
-	hash, err := chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex())
-
-	// L1 transactions are part of the hash
-	chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"})
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex())
-
-	// L2 transactions are not part of the hash
-	chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"})
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex())
-
-	// numL1Messages are not part of the hash
-	chunk.Blocks[0].NumL1Messages = 1
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex())
-
-	// invalid hash
-	chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"})
-	_, err = chunk.Hash()
-	assert.Error(t, err)
-
-	trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex())
-
-	trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}}
-	chunk, err = NewDAChunk(originalChunk, 0)
-	assert.NoError(t, err)
-	hash, err = chunk.Hash()
-	assert.NoError(t, err)
-	assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex())
-}
-
-func TestCodecV3BatchEncode(t *testing.T) {
-	// empty batch
-	batch := &DABatch{Version: uint8(encoding.CodecV3)}
-	encoded := hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "03000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded)
-
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd700000000000000000000000000000000000000000000000000000000000000000000000063807b2a098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d", encoded)
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "03000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad400000000000000000000000000000000000000000000000000000000000000000000000063807b2d2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8", encoded)
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c000000000000000000000000000000000000000000000000000000000000000000000000646b6e133e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2", encoded)
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "03000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370000000000000000000000000000000000000000000000000000000000000000000000000646b6ed030ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", encoded)
-
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8013750f6cb783ce2e8fec5a8aff6c45512f2496d6861204b11b6010fb4aa0029000000000000000000000000000000000000000000000000000000000000000000000000646b6ed073c21fcf521e068860a235a4b8f2cdf4a67966ccee1bb46b804b1e7d85333b516c079a4f68903dd18292f1bbdb36b2c94fcefe676931073c2340b2545a504de4", encoded)
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = hex.EncodeToString(batch.Encode())
-	assert.Equal(t, "030000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632000000000000000000000000000000000000000000000000000000000000000000000000646b6ed01bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2", encoded)
-}
-
-func TestCodecV3BatchHash(t *testing.T) {
-	// empty batch
-	batch := &DABatch{Version: uint8(encoding.CodecV3)}
-	assert.Equal(t, "0x9f059299e02cd1ccaed5bbcc821843000ae6b992b68b55ff59a51252478681b0", batch.Hash().Hex())
-
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xc5065afb8f29f620ae1edb4c6ebaf7380faf4226fb83ee920d70d489fe51c5c2", batch.Hash().Hex())
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x9ec8eabaa13229ec9c9d0687133afd7435afcfe260fc4c73fea052c0911522ac", batch.Hash().Hex())
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xda944b66dcaa6dc1442be2230233e97286ee1ed3c51cde155a36643b293b07c4", batch.Hash().Hex())
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x20e2324fac82e484c569eb286a221c61151c2b3c38a63b289f6ef6c30fb31e49", batch.Hash().Hex())
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xc962bce28a34a4eb9ec81393edcf2e6367e84aad9c4fc5641da6f18f54053ed5", batch.Hash().Hex())
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x405e0fc4b7efbe5b6d1dcc63c1f3253bbb6fbefedd1afe6b2067629f9da1f1cc", batch.Hash().Hex())
-
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x3d5d24c951cb55e56f3b4e2defcd8f32d6d048565e6723ac7cdff7ed5e580e3a", batch.Hash().Hex())
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xb25d9bd7d8442a56efd8e5ee814a99da7efdf3672bb85c48b975a9e248711bfb", batch.Hash().Hex())
-}
-
-func TestCodecV3BatchDataHash(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x9f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed541", batch.DataHash.Hex())
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xd46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a6", batch.DataHash.Hex())
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f", batch.DataHash.Hex())
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4", batch.DataHash.Hex())
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xc7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d", batch.DataHash.Hex())
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208", batch.DataHash.Hex())
-
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0xe7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d8", batch.DataHash.Hex())
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "0x9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e134767", batch.DataHash.Hex())
-}
-
-func TestCodecV3BatchBlob(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded := strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "00609c00fd0600240d0001000000e600f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f1030060b26d07d8b028b005", encoded)
-	assert.Equal(t, "0x01bbc6b98d7d3783730b6208afac839ad37dcf211b9d9e7c83a5f9d02125ddd7", batch.BlobVersionedHash.Hex())
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0060e7159d580094830001000016310002f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a0811037815160208301516040808501805100915193959294830192918464018211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7813ca8c134a9149a111111110549d2740105c410e61ca4d60312006013290b6398528818e2c8484081888c4890142465a631e63178f9940048f4006ba77adb9be01e898bbbfbc0afba2b64ed71162098740e35ec699633c6a84900670da2d948458ecd9f2e5dc5c5ac4afe3d62cf457cd3507b2eae71e064fab30088531f9c708fd40558dfc698511c4a68234d058c4972da28f0201c4ee550b500e36f0bb42e46bb556d6197be7ea27a3a853e5da024de5ea930350219b1638a00a1dcd41f8222f5d647291e05238c248aa4e028278ad4a9a720f5c16f637166004c4cc255e402cdf64c88e9231dd28a07b8f0ddf1dd7b388875a13dc6d447c000318bca02c54cdfa3621635af1ff932928dfde06038ac9729c301f9f3a3a395008d502ba9e137cc24c14cb4102cf6ba6708b9c812c3ba59a3cbcc5d2aafa8b50097b49fbeb704a22b6137ae9a13b600ad73748768b42756ba338f9854164b1b003f3e23255e4db853a2d3276f061093a37810212ba36db205219fab403242800009178588ad21f754085dd807b09af69e6f06bccbcef8ade3b1f0eb15a077b8005b024ecef4087f261a0d4033355c1e544bd0b0c100276008c420d6d30bc8be00a3ba741063e8b48cf152d3695c0904d477318d4ad46477cdf962443336479f00bd86fd52d4e2a1d23eeddc52463d524b44644abdcd097025bcf9cc636fc1030092cb15b81d7ea667f3ba711624bbf04e992871a6ea4f9d367ba6d46142176f00cdf03e4e19549d2eea45ca804421f6bc33933aab6d478b291bf3619fe15bc900975409d8f3677a87d1b1f7acdb3071b752f3d95c9363ac9c83752f223e45e50079308f554787b4d1f74e389823923f5d268be545466a2dd449963ad25407bd003a18601410b91ca081537f67ea8d527a49adf256f2363346ea35a2fe2768a900091a184f59680df81982c6087efc651f54693a7870aa7c13dcf054c39536c500de8a2dd66955567ff1730dac8533de482aed706ed3417823dd65d058b98899008d54917fd1f70735f7a6a8b1a053c08aac96fb04", encoded)
-	assert.Equal(t, "0x01fae670a781fb1ea366dad9c02caf4ea1de4f699214c8171f9219b0c72f6ad4", batch.BlobVersionedHash.Hex())
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0020d67d0100740200010000002000df0b80825dc0941a258d17bf244c4df02d0040343a7626a9d321e105808080808001002c0a1801", encoded)
-	assert.Equal(t, "0x012e15203534ae3f4cbe1b0f58fe6db6e5c29432115a8ece6ef5550bf2ffce4c", batch.BlobVersionedHash.Hex())
-
-	// this batch only contains L1 txs
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)
-	assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex())
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)
-	assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex())
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0020b6550000180001000100300a0c01", encoded)
-	assert.Equal(t, "0x015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", batch.BlobVersionedHash.Hex())
-
-	// 15 chunks
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0060300d2d0700140d000f000000e6f87180843b9aec2e8307a12094c0c4c8ba00ea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af60000008083019e00cea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a41e8006df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec28008bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03998005866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a05a003e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed32f10005004124d3c68f60b26d07d824482d965c11", encoded)
-	assert.Equal(t, "0x0140a7ef703ef625ee71e6a580a8ff05cab32c3f3402bd37a1b715f5810760c9", batch.BlobVersionedHash.Hex())
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	encoded = strings.TrimRight(hex.EncodeToString(batch.blob[:]), "0")
-	assert.Equal(t, "0060ed16256000449200020000173700f87180843b9aec2e8307a12094c0c4c800baea3f6acb49b6e1fb9e2adeceeacb0ca28a152d02c7e14af6000000808301009ecea0ab07ae99c67aa78e7ba5cf6781e90cc32b219b1de102513d56548a4100e86df514a034cbd19feacd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec00288bbaf42a8bf8710101bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf03900985866d8256f10c1be4f7b2cace28d8f20bde27e2604393eb095b7f77316a0005a3e6e81065f2b4604bcec5bd4aba684835996fc3f879380aac1c09c6eed3200f102f9162d82cf5502843b9b0a17831197e28080b915d26080604052348015006200001157600080fd5b50604051620014b2380380833981810160405260a000811037815160208301516040808501805191519395929483019291846401820011639083019060208201858179825181118282018810179482525091820192009091019080838360005b83c3578181015183820152602001620000a9565b5000505050905090810190601f16f15780820380516001836020036101000a031900168191508051604051939291900115012b01460175015b01a3908101518551009093508592508491620001c8916003918501906200026b565b50805162000100de90600490602084506005805461ff001960ff199091166012171690555060000680546001600160a01b03808816199283161790925560078054928716929000911691909117905562000230816200025562010000600160b01b0319163302001790555062000307915050565b60ff191660ff929092565b828160011615610001000203166002900490600052602060002090601f01602090048101928260001f10620002ae5780518380011785de0160010185558215620002de57918201005b8202de57825182559160200191906001c1565b50620002ec9291f0565b500090565b5b8002ec5760008155600101620002f1565b61119b8062000317600000396000f3fe61001004361061010b5760003560e01c80635c975abb116100a20057806395d89b4111610071146103015780639dc29fac14610309578063a45700c2d714610335578063a9059cbb14610361578063dd62ed3e1461038d576101000b565b1461029d57806370a08231146102a55780638456cb59146102cb578000638e50817a146102d3313ce567116100de571461021d57806339509351146100023b5780633f4ba83a1461026757806340c10f191461027106fdde031461010010578063095ea7b31461018d57806318160ddd146101cd57806323b872e757005b6101186103bb565b6040805160208082528351818301528351919283929000830161015261013a61017f92505080910390f35b6101b960048036036040810010156101a3813516906020013561045191151582525190819003602001d56100046e60fd811691602081013590911690604074565b6102256104fb60ff9092001640025105046f610552565b005b61026f028705a956610654d520bb351661000662067d56e90135166106d218610757031f07b856034b085f77c7d5a308db00565b6003805420601f600260001961010060018816150201909516949094040093840181900481028201810190925282815260609390929091830182828015006104475780601f1061041c576101008083540402835291610447565b825b810054815260200180831161042a57829003601f16820191565b60006104656104005e610906565b848461090a565b5060019202548184f6565b6104f18461048d006104ec85604051806060806028611085602891398a16600090815260016020005260408120906104cb810191909152604001600020549190610b51565b93540060ff160511016000610522908116825260208083019390935260409182012000918c168152925290205490610be8565b600716331461059f5762461bcd60e5001b60040b60248201526a1b9bdd08185b1b1bddd95960aa1b60448201529064000190fd5b6105a7610c49565b610100900460ff16156105f9106f14185d5cd800589b194e881c185d5cd95960826006064606508282610ced909052604006ca000ddd900407260c6b6f6e6c7920466163746f727960a007928391821617909100559390921660041561080808550e65086c2511176025006108968dd49182400080832093909416825233831661094f5704018080602001828103825260240100806110f36024913960400191fd8216610994223d60228084166000819487160080845294825291829020859055815185815291517f8c5be1e5ebec7d5bd14f0071427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b92592819003a350831661000a3b25ce8216610a80230ff86023610a8b838383610f61565b610ac881265f00602686858082209390935590841681522054610af790822040949094558090005191937fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a004df523b3ef9291829003008184841115610be08381815191508051900ba50b008d0bd2fd900300828201610c421b7f536166654d6174683a20616464697469006f6e206f766572666c6f7700610c9c1473621690557f5db9ee0a495bf2e6ff009c91a7834c1ba4fdd244a5e8aa4e537bd38aeae4b073aa610cd0a18216610d00481f7f45524332303a206d696e7420746f20746865207a65726f7265737361000d546000600254610d610255902054610d8780838393519293910e2d610100001790557f62e78cea01bee320cd4e420270b5ea74000d11b0c9f74754ebdbfc00544b05a2588216610eaa6021ad6021610eb68260000ef3221b85839020550f00199082610fb5408051826000918516919120565b610f6cb07415610fb02a11003c602a00610c428383401e73756274726163815250fe7472616e73666572620075726e20616d6f756e7420657863656564732062616c616e6365617070726f007665616c6c6f7766726f6d6465637265617365642062656c6f77506175736100626c653a20746f6b656e7768696c6520706175736564a264697066735822120020e96342bec8f6c2bf72815a39998973b64c3bed57770f402e9a7b7eeda026005d4c64736f6c634300060c00331c5a77d9fa7ef466951b2f01f724bca3a582000b63a0e012095745544820636f696e04c001a0235c1a8d40e8c347890397f100a92e6eadbd6422cf7c210e3e1737f0553c633172a02f7c0384ddd06970446e0074229cd96216da62196dc62395bda52095d44b8a9af7df0b80825dc0941a25008d17bf244c4df02d40343a7626a9d321e1058080808080813ea8c134a9149a00111111110549d2740105c410e61ca4d603126013290b6398528818e2c848400081888c4890142465a631e63178f9940048f46ba77adb9be01e898bbbfb80cc00ba2b64ed71162098740e35ec699633c6a849670da2d948458ecd9f2e5dc5c500ac4afe3d62cf457cd3507b2eae71e064fab388531f9c708fd40558dfc69851001c4a68234d058c4972da28f0201c4ee550b5e36f0bb42e46bb556d6197be7e00a27a3a853e5da024de5ea930350219b1638aa1dcd41f8222f5d647291e0523008c248aa4e028278ad4a9a720f5c16f6371664c4cc255e402cdf64c88e9231d00d28a07b8f0ddf1dd7b388875a13dc6d447c0318bca02c54cdfa3621635af1f00f932928dfde06038ac9729c301f9f3a3a3958d502ba9e137cc24c14cb4102c00f6ba6708b9c812c3ba59a3cbcc5d2aafa8b597b49fbeb704a22b6137ae9a1300b600ad73748768b42756ba338f9854164b1b3f3e23255e4db853a2d3276f06001093a37810212ba36db205219fab4032428009178588ad21f754085dd807b0009af69e6f06bccbcef8ade3b1f0eb15a077b85b024ecef4087f261a0d403335005c1e544bd0b0c100276008c420d6d30bc8bea3ba741063e8b48cf152d3695c000904d477318d4ad46477cdf962443336479fbd86fd52d4e2a1d23eeddc5246003d524b44644abdcd097025bcf9cc636fc10392cb15b81d7ea667f3ba71162400bbf04e992871a6ea4f9d367ba6d46142176fcdf03e4e19549d2eea45ca80440021f6bc33933aab6d478b291bf3619fe15bc9975409d8f3677a87d1b1f7acdb003071b752f3d95c9363ac9c83752f223e45e579308f554787b4d1f74e38982300923f5d268be545466a2dd449963ad25407bd3a18601410b91ca081537f67ea008d527a49adf256f2363346ea35a2fe2768a9091a184f59680df81982c6087e00fc651f54693a7870aa7c13dcf054c39536c5de8a2dd66955567ff1730dac850033de482aed706ed3417823dd65d058b988998d54917fe9bb80f5ee4d5c636d00a70ee60a586fdb282babf53e01", encoded)
-	assert.Equal(t, "0x0128f90d5edbcb10d13521824ccc7f47f85aff6e2da01004f9a402854eb33632", batch.BlobVersionedHash.Hex())
-}
-
-func TestCodecV3BatchChallenge(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea", hex.EncodeToString(batch.z[:]))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b7792", hex.EncodeToString(batch.z[:]))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf", hex.EncodeToString(batch.z[:]))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", hex.EncodeToString(batch.z[:]))
-
-	// 15 chunks
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a", hex.EncodeToString(batch.z[:]))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b32", hex.EncodeToString(batch.z[:]))
-}
-
-func TestCodecV3ChunkAndBatchCommitGasEstimation(t *testing.T) {
-	block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}}
-	chunk2Gas := EstimateChunkL1CommitGas(chunk2)
-	assert.Equal(t, uint64(51124), chunk2Gas)
-	batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch2Gas := EstimateBatchL1CommitGas(batch2)
-	assert.Equal(t, uint64(207649), batch2Gas)
-
-	block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}}
-	chunk3Gas := EstimateChunkL1CommitGas(chunk3)
-	assert.Equal(t, uint64(51124), chunk3Gas)
-	batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch3Gas := EstimateBatchL1CommitGas(batch3)
-	assert.Equal(t, uint64(207649), batch3Gas)
-
-	block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}}
-	chunk4Gas := EstimateChunkL1CommitGas(chunk4)
-	assert.Equal(t, uint64(53745), chunk4Gas)
-	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch4Gas := EstimateBatchL1CommitGas(batch4)
-	assert.Equal(t, uint64(210302), batch4Gas)
-
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}}
-	chunk5Gas := EstimateChunkL1CommitGas(chunk5)
-	assert.Equal(t, uint64(52202), chunk5Gas)
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}}
-	chunk6Gas := EstimateChunkL1CommitGas(chunk6)
-	assert.Equal(t, uint64(53745), chunk6Gas)
-	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5Gas := EstimateBatchL1CommitGas(batch5)
-	assert.Equal(t, uint64(213087), batch5Gas)
-}
-
-func repeat(element byte, count int) string {
-	result := make([]byte, 0, count)
-	for i := 0; i < count; i++ {
-		result = append(result, element)
-	}
-	return "0x" + common.Bytes2Hex(result)
-}
-
-func TestCodecV3BatchStandardTestCases(t *testing.T) {
-	// Taking into consideration compression, we allow up to 5x of max blob bytes.
-	// We then ignore the metadata rows for 45 chunks.
-	maxChunks := 45
-	nRowsData := 5*126976 - (maxChunks*4 + 2)
-
-	for _, tc := range []struct {
-		chunks                    [][]string
-		expectedz                 string
-		expectedy                 string
-		expectedBlobVersionedHash string
-		expectedBatchHash         string
-	}{
-		// single empty chunk
-		{chunks: [][]string{{}}, expectedz: "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e", expectedy: "132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2", expectedBlobVersionedHash: "015b4e3d3dcd64cc0eb6a5ad535d7a1844a8c4cdad366ec73557bcc533941370", expectedBatchHash: "edde6b1becf302856884f0b9da5879d58eeb822ddab14a06bacd8de9276dbc79"},
-		// single non-empty chunk
-		{chunks: [][]string{{"0x010203"}}, expectedz: "13c58784e6eeed40130ab43baa13a1f2d5a6d895c66f554456e00c480568a42d", expectedy: "248ace7f7f0fb3718b80b8cf04be560b97d083a3dbbd79d169e0fe9c80c9668c", expectedBlobVersionedHash: "0161d97a72d600ed5aa264bc8fc409a87e60b768ffb52b9c1106858c2ae57f04", expectedBatchHash: "4c30ec3d03ecf70c479e802640a185cadf971e61acf68dac149ac73bdc645195"},
-		// multiple empty chunks
-		{chunks: [][]string{{}, {}}, expectedz: "102e7bf1335a8a86e8ecac2283843eff536555e464bb6ba01a29ff1ca8d4b8cb", expectedy: "033a0272284ae81eb693588e731fc19ad24c44a332405e471966335b37f1a2c2", expectedBlobVersionedHash: "01c0a83d1c0ee2ee06f030ca2f0ec36827b3e9682cbc8c00a27b0bdd3530488b", expectedBatchHash: "31fd0237208587df3ddbea413673b479e2daa84fd1143a519940267c37257b1a"},
-		// multiple non-empty chunks
-		{chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "0ac462d144c9aa1a7538aebd9087e34e9f9590e59b58ffa08f03cd9e43382ed0", expectedy: "6ac7fc7686c900c9e27fd0ca69736cf77016c8b9e8fd3ebab0ee6be1d6c30c93", expectedBlobVersionedHash: "0104efe2cfccfb25e5ae40250af541bd217cae4c9bc14daaf0360a0a36aa2d03", expectedBatchHash: "0e0e8fd8b4f8ceb0215a29cc8b95750c0d1969706573af8872f397747809a479"},
-		// empty chunk followed by non-empty chunk
-		{chunks: [][]string{{}, {"0x010203"}}, expectedz: "1d81a4d2c78fbbf379562a998edde942b2019ec88ede9150a4c2a52a4e271ace", expectedy: "656603441f898b3dd64e0963fea53bfd6a445cb4f838c5caf181186cf45dd7ec", expectedBlobVersionedHash: "0131b881bdc8d8b70a62d9a6f249dc7a48f37428ac10809299489e5e60911f80", expectedBatchHash: "d6b97dde29d4b8afb1a036ee54757af4087c939cb96cf17c2720e9f59eff19da"},
-		// non-empty chunk followed by empty chunk
-		{chunks: [][]string{{"0x070809"}, {}}, expectedz: "275116a8ff16b17b90d7287fb567e766d1f79f54f8ac3c6d80e2de59fd34f115", expectedy: "5fea2c1bbed12ccdcf9edef780330ee1d13439de4d3b8f4968f2bda9e4fb8b1f", expectedBlobVersionedHash: "01c44c7e70df601a245e714be4f0aa7c918a0056bff379c20a7128e5926db664", expectedBatchHash: "3d56e12359c8b565f9cbe1c8f81e848be4635d9df84bc6ef0eb9986a15e08c20"},
-		// max number of chunks all empty
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4583c59de31759dbc54109bb2d5825a36655e71db62225fc5d7d758191e59a6b", expectedy: "0b119ffd6c88037d62e1bee05f609d801c6cc6e724214555b97affe3b852819a", expectedBlobVersionedHash: "013ac7e2db84a2f26ee2cba3a5cabbfffd1f7c053e7ea17add4f84a82cf8285a", expectedBatchHash: "2e8078e277221a0d0e235ef825eef02653677bd50e259aeed64af5b95477645c"},
-		// max number of chunks all non-empty
-		{chunks: [][]string{
-			{"0x0a"},
-			{"0x0a0b"},
-			{"0x0a0b0c"},
-			{"0x0a0b0c0d"},
-			{"0x0a0b0c0d0e"},
-			{"0x0a0b0c0d0e0f"},
-			{"0x0a0b0c0d0e0f10"},
-			{"0x0a0b0c0d0e0f1011"},
-			{"0x0a0b0c0d0e0f101112"},
-			{"0x0a0b0c0d0e0f10111213"},
-			{"0x0a0b0c0d0e0f1011121314"},
-			{"0x0a0b0c0d0e0f101112131415"},
-			{"0x0a0b0c0d0e0f10111213141516"},
-			{"0x0a0b0c0d0e0f1011121314151617"},
-			{"0x0a0b0c0d0e0f101112131415161718"},
-			{"0x0a0b0c0d0e0f10111213141516171819"},
-			{"0x0a0b0c0d0e0f101112131415161718191a"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"},
-			{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"},
-		}, expectedz: "08454da7c353fa9d7c4d044cca3972dab6aa38e583276848b1aec904f5592837", expectedy: "36cbc815c329e864a018cadf25070d62184d570ef031f5b5c8a5385e65babe9c", expectedBlobVersionedHash: "0198009a5e0941a6acb7dcd95a5016d7f25ca92d66fb300cf6f9918102ef66c0", expectedBatchHash: "e366eeacd45fbc2f43756f66d0a8f82f7f390a9aa7795df82e7df2d724856e7e"},
-		// single chunk blob full
-		{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "63bebf986e2f0fc8bf5f7067108ea4a2b35a5927296e17d5c0bbc5ec04d8dce4", expectedy: "013b762f02e95a62f08977b1a43a017cd84f785b52ebf8ef25e9ebba6c9b76cb", expectedBlobVersionedHash: "01f68a6b3c0ba2ea0406f80f9c88b9905d9b3cc5b2d8ef12923b20fb24b81855", expectedBatchHash: "88e6df6a5e1112485995fe5957d57c90ff306343a9d8d80831b7a6c041daf728"},
-		// multiple chunks blob full
-		{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "465e095b082136f20ca975c10eafbb3bf2b71724798da87bd62d3f8795c615dc", expectedy: "6f2ff37b255e0da8b5678a9b1157fdc8a1213c17bd248efd50a4c1540c26295c", expectedBlobVersionedHash: "01da6bdac6237fcba7742cf48868467bf95a5e7f33d16c172b36852e506b46b6", expectedBatchHash: "7bd97fc7c8c7e918029e5bd85d3c9e0335117475c449d5c6dd24e5af9d55cfc6"},
-		// max number of chunks only last one non-empty not full blob
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "1ca17fdb4dea8396d7e2f10ef7b2a587750517df70ec0ce0d853e61310aec0f3", expectedy: "1b686f2eb8d7e3e2325d9101dd799f5e13af8482b402661325545646a9c96ec0", expectedBlobVersionedHash: "019d11fab4509a83623a64b466a00344552fd44421e78726cda537d06c8425d3", expectedBatchHash: "8b50a41e08000b7617de7204d8082870c8446f591fadffcb5190fdeadf47fae5"},
-		// max number of chunks only last one non-empty full blob
-		{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "29c684b13d22cb43d81b9b449c281c15126fdc73512606de81c2d3fc9c7793b1", expectedy: "574418d83d77f6096934c2c4281edf61d48925a268411df0e0c818c6d43156d1", expectedBlobVersionedHash: "01f8da934ada220153abee70e85604ef8fbbf98c203b5eae14d23be088a41f45", expectedBatchHash: "cc0592160b2fcdb58750d29c36662b55437f4bc69ba3d45a965590f534a0228c"},
-		// max number of chunks but last is empty
-		{chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "16d2883b0797d3420fabf4591f9dbe9f850ce600ce6133c98c9d291d8b3ce0a9", expectedy: "5bdc1ca8f09efa9c544d2b03d565fec500d5347acd5b3fd4d88e881f9459d83a", expectedBlobVersionedHash: "01f51532d6bb0afe8a0a61351888f322cba40dc664408a3201eb761aaba66671", expectedBatchHash: "043a40c8fbc4edb6a820ba4162f1368d157d1d59c07f969b2c584cc6a47385ca"},
-	} {
-		chunks := []*encoding.Chunk{}
-
-		for _, c := range tc.chunks {
-			block := &encoding.Block{Transactions: []*types.TransactionData{}}
-
-			for _, data := range c {
-				tx := &types.TransactionData{Type: 0xff, Data: data}
-				block.Transactions = append(block.Transactions, tx)
-			}
-
-			chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}}
-			chunks = append(chunks, chunk)
-		}
-
-		blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* use mock */)
-		require.NoError(t, err)
-		actualZ := hex.EncodeToString(z[:])
-		assert.Equal(t, tc.expectedz, actualZ)
-		assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash)
-
-		_, y, err := kzg4844.ComputeProof(blob, *z)
-		require.NoError(t, err)
-		actualY := hex.EncodeToString(y[:])
-		assert.Equal(t, tc.expectedy, actualY)
-
-		// Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000)
-		dataBytes := make([]byte, 32*len(chunks))
-		for i := range chunks {
-			copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0})
-		}
-		dataHash := crypto.Keccak256Hash(dataBytes)
-
-		batch := DABatch{
-			Version:              uint8(encoding.CodecV3),
-			BatchIndex:           6789,
-			L1MessagePopped:      101,
-			TotalL1MessagePopped: 10101,
-			DataHash:             dataHash,
-			BlobVersionedHash:    blobVersionedHash,
-			ParentBatchHash:      common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}),
-			LastBlockTimestamp:   192837,
-			blob:                 blob,
-			z:                    z,
-		}
-
-		batch.BlobDataProof, err = batch.blobDataProofForPICircuit()
-		require.NoError(t, err)
-
-		assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash())
-	}
-}
-
-func TestCodecV3BatchBlobDataProof(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err := batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "098f1f136f5734039818bee35222d35a96acd7d17120ce8816307527d19badea17d013be5ef696cfbc05b97bb322a587432c2cb23c4848d4d7cb8453c475b38d90b7a581ba5b2cd6a916d139d2b7f28bf6997adb512653f6bdef0bbb7d681c742560fab406fd299c04fc1a464d277f8a8b3a918761888bd0f9a96cb9b2521347131a43b633c4fa01470842d9fe4211bc59c990f69185b80def79b9dfbf039b75", hex.EncodeToString(verifyData))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "2c440817c5d20c385554774de3fa5d9f32da1dcba228e5cf04f627a41b4b779203f4ef0f3161a3a812523673119d90fb5303248b9fc58c3031a7f4b0937912b8b1530a433168a29443af928876b3d63f4205ba1876d303d56f8456483b9ce91b6ff2b1707726f01c1429cb9d87e4c165ade0ec9e0547ea5721ff442f63d8fcf9ba2f066b07d9b8a0f057e9c0e0e1e56f9a6ec627f9b1cb24866802e15c49c22a", hex.EncodeToString(verifyData))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "3e935190ba34184cc7bf61a54e030b0ec229292b3025c14c3ef7672b259521cf27c007dc51295c1fe2e05882128a62ef03fb30aaaa4415505929eac7f35424f2a5979717c35155300b0b2d68610aacdd8b0dbb94990168103bfd62985732e3f682370c91c9f2b8f08c6398194e2bb18b83eae765cef6e4e991d91e631dd454953516721962a089a03e4d8f640cd115ede836bad7141e8094317a45ccd04ec842", hex.EncodeToString(verifyData))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "30ba77ffda1712a0cfbbfce9facbc25a2370dc67d6480c686da47b7f181d527e132f281fd2bc8409114826d70e3148c93b9b4fee7b21c7680e750b3b0c5f6df2aa4fe1ee5d7af73b27b10c68f66f4c3700ffe684aa0593cd19690e8075303ca7d395e6d0add8aa5e3e668820713c3377a8bf6769fc8bef4d141ac117962ae0fc2e2606862b3542e5e9b6197f9dcd8a4b126a08b160da6ade484dd4cc1c7be4be", hex.EncodeToString(verifyData))
-
-	// 15 chunks
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "1bc420092ec4e0af62e7a9243dd6a39ee1341e33032647d3edc16fb4dea5f60a0fad18d05f6f7d57b03dc717f8409489806d89ee5044bea951538682c52d815097e898dbd9a99b1bae2d759ee5f77ac6b6e8fb2cddaf26500532270fd4066e7ae85c450bcbf2cdb4643147091a1ee11ca615b823c97a69cb716d80de6ccafc5823af3a17fc71b72c224edd387abbf4433af013b53f15f394e501e5a3e57af074", hex.EncodeToString(verifyData))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	verifyData, err = batch.BlobDataProofForPointEvaluation()
-	assert.NoError(t, err)
-	assert.Equal(t, "1bea70cbdd3d088c0db7d3dd5a11a2934ec4e7db761195d1e62f9f38a2fd5b325910eea5d881106c394f8d9a80bac8ecc43a86e0b920c5dc93f89caa43b205c2880cc02297edda15b6a14c4481fd15db8209aa52b80aecde6fce0592093eaf0d813c2f081eacb1efa9a8030191e1b780b421b0df42cc64da5e466af6f8cbc20afcb993e6d217440b5b21f2be91abe8620e1518780aa2005ec0a80cb947ebfef9", hex.EncodeToString(verifyData))
-}
-
-func TestCodecV3BatchL1MessagePopped(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch, err := NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 0, int(batch.L1MessagePopped))
-	assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 0, int(batch.L1MessagePopped))
-	assert.Equal(t, 0, int(batch.TotalL1MessagePopped))
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1
-	assert.Equal(t, 11, int(batch.TotalL1MessagePopped))
-
-	trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json")
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-
-	originalBatch.TotalL1MessagePoppedBefore = 37
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-
-	trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json")
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3
-	assert.Equal(t, 10, int(batch.TotalL1MessagePopped))
-
-	trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json")
-	chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2
-	assert.Equal(t, 257, int(batch.TotalL1MessagePopped))
-
-	originalBatch.TotalL1MessagePoppedBefore = 1
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2
-	assert.Equal(t, 257, int(batch.TotalL1MessagePopped))
-
-	chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10
-	chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}                 // queue index 37-41
-	originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 42, int(batch.L1MessagePopped))
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-
-	originalBatch.TotalL1MessagePoppedBefore = 10
-	batch, err = NewDABatch(originalBatch)
-	assert.NoError(t, err)
-	assert.Equal(t, 32, int(batch.L1MessagePopped))
-	assert.Equal(t, 42, int(batch.TotalL1MessagePopped))
-}
-
-func TestCodecV3ChunkAndBatchBlobSizeEstimation(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(412), chunk2BatchBytesSize)
-	assert.Equal(t, uint64(237), chunk2BlobSize)
-	batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(412), batch2BatchBytesSize)
-	assert.Equal(t, uint64(237), batch2BlobSize)
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(5863), chunk3BatchBytesSize)
-	assert.Equal(t, uint64(2933), chunk3BlobSize)
-	batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(5863), batch3BatchBytesSize)
-	assert.Equal(t, uint64(2933), batch3BlobSize)
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(214), chunk4BatchBytesSize)
-	assert.Equal(t, uint64(54), chunk4BlobSize)
-	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(214), blob4BatchBytesSize)
-	assert.Equal(t, uint64(54), batch4BlobSize)
-
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-	chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(6093), chunk5BatchBytesSize)
-	assert.Equal(t, uint64(3149), chunk5BlobSize)
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(214), chunk6BatchBytesSize)
-	assert.Equal(t, uint64(54), chunk6BlobSize)
-	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5)
-	assert.NoError(t, err)
-	assert.Equal(t, uint64(6125), batch5BatchBytesSize)
-	assert.Equal(t, uint64(3186), batch5BlobSize)
-}
-
-func TestCodecV3ChunkAndBatchCalldataSizeEstimation(t *testing.T) {
-	trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json")
-	chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}}
-	chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2)
-	assert.Equal(t, uint64(60), chunk2CalldataSize)
-	batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
-	batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2)
-	assert.Equal(t, uint64(60), batch2CalldataSize)
-
-	trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json")
-	chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
-	chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3)
-	assert.Equal(t, uint64(60), chunk3CalldataSize)
-	batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
-	batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3)
-	assert.Equal(t, uint64(60), batch3CalldataSize)
-
-	trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json")
-	chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4)
-	assert.Equal(t, uint64(60), chunk4CalldataSize)
-	batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
-	batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4)
-	assert.Equal(t, uint64(60), batch4CalldataSize)
-
-	chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}}
-	chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5)
-	assert.Equal(t, uint64(120), chunk5CalldataSize)
-	chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
-	chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6)
-	assert.Equal(t, uint64(60), chunk6CalldataSize)
-	batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}}
-	batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5)
-	assert.Equal(t, uint64(180), batch5CalldataSize)
-}
-
-func TestCodecV3DABatchJSONMarshalUnmarshal(t *testing.T) {
-	t.Run("Case 1", func(t *testing.T) {
-		jsonStr := `{
-			"version": 3,
-			"batch_index": 293212,
-			"l1_message_popped": 7,
-			"total_l1_message_popped": 904750,
-			"data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450",
-			"blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e",
-			"parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee",
-			"last_block_timestamp": 1721130505,
-			"blob_data_proof": [
-				"0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e",
-				"0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"
-			]
-		}`
-
-		var batch DABatch
-		err := json.Unmarshal([]byte(jsonStr), &batch)
-		require.NoError(t, err)
-
-		assert.Equal(t, uint8(3), batch.Version)
-		assert.Equal(t, uint64(293212), batch.BatchIndex)
-		assert.Equal(t, uint64(7), batch.L1MessagePopped)
-		assert.Equal(t, uint64(904750), batch.TotalL1MessagePopped)
-		assert.Equal(t, common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), batch.DataHash)
-		assert.Equal(t, common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), batch.BlobVersionedHash)
-		assert.Equal(t, common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), batch.ParentBatchHash)
-		assert.Equal(t, uint64(1721130505), batch.LastBlockTimestamp)
-		assert.Equal(t, common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), batch.BlobDataProof[0])
-		assert.Equal(t, 
common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x6c693817a272efd00dd1323a533a114bd0a8c63b55816fde36c5784a4125441d") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 2", func(t *testing.T) { - jsonStr := `{ - "version": 4, - "batch_index": 123, - "l1_message_popped": 0, - "total_l1_message_popped": 0, - "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", - "last_block_timestamp": 1720174236, - "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", - "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", - "blob_data_proof": [ - "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", - "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(4), batch.Version) - assert.Equal(t, uint64(123), batch.BatchIndex) - assert.Equal(t, uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(0), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), batch.ParentBatchHash) - assert.Equal(t, uint64(1720174236), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x005661faf2444824b8a3fe1a53958195b197436a0df81b5d1677287bcd1c1923") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 3", func(t *testing.T) { - jsonStr := `{ - "version": 3, - "batch_index": 293205, - "l1_message_popped": 0, - "total_l1_message_popped": 904737, - "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", - "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", - "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", - "last_block_timestamp": 1721129563, - "blob_data_proof": [ - "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", - "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(3), batch.Version) - assert.Equal(t, uint64(293205), batch.BatchIndex) - assert.Equal(t, 
uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(904737), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721129563), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0xe86e067f78b1c29c1cc297f6d9fe670c7beea1eebb226d1b8eeb9616a2bcac7e") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/codecv4/codecv4.go b/encoding/codecv4.go similarity index 70% rename from encoding/codecv4/codecv4.go rename to encoding/codecv4.go index b07e2be..d750127 100644 --- a/encoding/codecv4/codecv4.go +++ b/encoding/codecv4.go @@ -1,4 +1,4 @@ -package codecv4 +package encoding import ( "crypto/sha256" @@ -7,6 +7,7 @@ import ( "errors" "fmt" "math/big" + "sync/atomic" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -14,22 +15,24 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv3" "github.com/scroll-tech/da-codec/encoding/zstd" ) -// MaxNumChunks is the maximum number of chunks that a batch can contain. -const MaxNumChunks = codecv3.MaxNumChunks +type DACodecV4 struct { + enableCompress uint32 +} + +// Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv4MaxNumChunks = 45 -// DABlock represents a Data Availability Block. -type DABlock = codecv3.DABlock +// DABlockV4 represents a Data Availability Block. +type DABlockV4 = DABlockV3 -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunk = codecv3.DAChunk +// DAChunkV4 groups consecutive DABlocks with their transactions. +type DAChunkV4 = DAChunkV3 -// DABatch contains metadata about a batch of DAChunks. -type DABatch struct { +// DABatchV4 contains metadata about a batch of DAChunks. +type DABatchV4 struct { // header Version uint8 `json:"version"` BatchIndex uint64 `json:"batch_index"` @@ -49,20 +52,20 @@ type DABatch struct { blobBytes []byte } -// NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. 
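The rename above folds the codecv4 package into encoding and turns its free functions into methods on DACodecV4, which mostly delegate to DACodecV3. One payoff is that callers can pick a codec version behind a single interface. A minimal sketch of that pattern; the CodecAPI interface and codecForVersion helper are illustrative assumptions, not part of this patch:

// Hypothetical caller-side view of the refactor. Only DACodecV3, DACodecV4,
// Batch, DABatch and CodecV4 are names from this diff; CodecAPI and
// codecForVersion are sketched for illustration.
// import "github.com/scroll-tech/da-codec/encoding"

type CodecAPI interface {
	NewDABatch(batch *encoding.Batch) (encoding.DABatch, error)
}

func codecForVersion(v uint8) CodecAPI {
	if v >= uint8(encoding.CodecV4) {
		return &encoding.DACodecV4{} // zero value: compression disabled
	}
	return &encoding.DACodecV3{}
}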
-func NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { - return codecv3.NewDABlock(block, totalL1MessagePoppedBefore) +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { + return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) } -// NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (*DAChunk, error) { - return codecv3.NewDAChunk(chunk, totalL1MessagePoppedBefore) +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +func (o *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + return (&DACodecV3{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) } -// NewDABatch creates a DABatch from the provided encoding.Batch. -func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { +// NewDABatch creates a DABatch from the provided Batch. +func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > MaxNumChunks { + if len(batch.Chunks) > Codecv4MaxNumChunks { return nil, errors.New("too many chunks in batch") } @@ -75,19 +78,19 @@ func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { } // batch data hash - dataHash, err := ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + _, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } // blob payload - blob, blobVersionedHash, z, blobBytes, err := ConstructBlobPayload(batch.Chunks, enableCompress, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -95,8 +98,8 @@ func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - daBatch := DABatch{ - Version: uint8(encoding.CodecV4), + daBatch := DABatchV4{ + Version: uint8(CodecV4), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, TotalL1MessagePopped: totalL1MessagePoppedAfter, @@ -121,21 +124,21 @@ func NewDABatch(batch *encoding.Batch, enableCompress bool) (*DABatch, error) { // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a batch in the contracts, // the latter is used in the public input to the provers.
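Taken together, the new NewDABatch path is: batch data hash, skipped-L1-message bitmap, blob payload, then the DABatchV4 header. A minimal usage sketch, assuming the DABatch interface exposes Hash() the way DABatchV4 does (imports: da-codec/encoding, go-ethereum/common):

func buildV4Batch(block *encoding.Block) (common.Hash, error) {
	codec := &encoding.DACodecV4{}
	codec.SetCompression(true) // request a zstd-compressed blob payload

	chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}}
	batch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk}}

	daBatch, err := codec.NewDABatch(batch)
	if err != nil {
		return common.Hash{}, err
	}
	// keccak256 over the 193-byte encoded header
	return daBatch.Hash(), nil
}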
-func ComputeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return codecv3.ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) +func (o *DACodecV4) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + return (&DACodecV3{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) } // ConstructBlobPayload constructs the 4844 blob payload. -func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV4) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) - metadataLength := 2 + MaxNumChunks*4 + metadataLength := 2 + Codecv4MaxNumChunks*4 // batchBytes represents the raw (un-compressed and un-padded) blob payload batchBytes := make([]byte, metadataLength) // challenge digest preimage // 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash - challengePreimage := make([]byte, (1+MaxNumChunks+1)*32) + challengePreimage := make([]byte, (1+Codecv4MaxNumChunks+1)*32) // the chunk data hash used for calculating the challenge preimage var chunkDataHash common.Hash @@ -155,7 +158,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } // encode L2 txs into blob payload - rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -173,10 +176,10 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } - // if we have fewer than MaxNumChunks chunks, the rest + // if we have fewer than Codecv4MaxNumChunks chunks, the rest // of the blob metadata is correctly initialized to 0, // but we need to add padding to the challenge preimage - for chunkID := len(chunks); chunkID < MaxNumChunks; chunkID++ { + for chunkID := len(chunks); chunkID < Codecv4MaxNumChunks; chunkID++ { // use the last chunk's data hash as padding copy(challengePreimage[32+chunkID*32:], chunkDataHash[:]) } @@ -186,7 +189,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock copy(challengePreimage[0:], hash[:]) var blobBytes []byte - if enableCompress { + if o.isCompressEnabled() { // blobBytes represents the compressed blob payload (batchBytes) var err error blobBytes, err = zstd.CompressScrollBatchBytes(batchBytes) @@ -195,7 +198,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } if !useMockTxData { // Check compressed data compatibility. 
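For concreteness, with Codecv4MaxNumChunks = 45 the metadata section is 2 + 45*4 = 182 bytes and the challenge preimage is (1 + 45 + 1) * 32 = 1504 bytes. The index math the loop above relies on when recording chunk sizes looks like this (a standalone sketch that mirrors, rather than quotes, the patch; import encoding/binary):

const maxNumChunks = 45
const metadataLength = 2 + maxNumChunks*4 // 182 bytes: num_chunks, then chunk sizes

// writeChunkSize records chunk i's payload length in the blob metadata:
// 2 bytes of num_chunks first, then 4 big-endian bytes per chunk.
func writeChunkSize(metadata []byte, chunkID int, size uint32) {
	binary.BigEndian.PutUint32(metadata[2+4*chunkID:], size)
}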
- if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -211,7 +214,7 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock } // convert raw data to BLSFieldElements - blob, err := encoding.MakeBlobCanonical(blobBytes) + blob, err := MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -224,11 +227,11 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) // challenge: append blob versioned hash - copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:]) + copy(challengePreimage[(1+Codecv4MaxNumChunks)*32:], blobVersionedHash[:]) // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -241,12 +244,12 @@ func ConstructBlobPayload(chunks []*encoding.Chunk, enableCompress bool, useMock // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func NewDABatchFromBytes(data []byte) (*DABatch, error) { +func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } - b := &DABatch{ + b := &DABatchV4{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), @@ -265,7 +268,7 @@ func NewDABatchFromBytes(data []byte) (*DABatch, error) { } // Encode serializes the DABatch into bytes. -func (b *DABatch) Encode() []byte { +func (b *DABatchV4) Encode() []byte { batchBytes := make([]byte, 193) batchBytes[0] = b.Version binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) @@ -281,13 +284,13 @@ func (b *DABatch) Encode() []byte { } // Hash computes the hash of the serialized DABatch. -func (b *DABatch) Hash() common.Hash { +func (b *DABatchV4) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { +func (b *DABatchV4) blobDataProofForPICircuit() ([2]common.Hash, error) { if b.blob == nil { return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") } @@ -312,7 +315,7 @@ func (b *DABatch) blobDataProofForPICircuit() ([2]common.Hash, error) { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. 
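NewDABatchFromBytes and Encode pin the serialized header at 193 bytes. The layout below is inferred from the slicing shown plus the field widths (1 + 3*8 + 3*32 + 8 + 2*32 = 193); the tail offsets are not visible in this hunk, so treat this as a reading aid rather than as normative:

// DABatchV4 header layout (193 bytes), as consumed by NewDABatchFromBytes:
//   data[0]        version
//   data[1:9]      batch index              (big-endian uint64)
//   data[9:17]     L1 messages popped       (big-endian uint64)
//   data[17:25]    total L1 messages popped (big-endian uint64)
//   data[25:57]    data hash
//   data[57:89]    blob versioned hash
//   data[89:121]   parent batch hash
//   data[121:129]  last block timestamp     (big-endian uint64)
//   data[129:193]  blob data proof, two bytes32 words
const daBatchV4EncodedLength = 193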
-func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *DABatchV4) BlobDataProofForPointEvaluation() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") } @@ -336,7 +339,7 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { // | bytes32 | bytes32 | bytes48 | bytes48 | values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := encoding.GetBlobDataProofArgs() + blobDataProofArgs, err := GetBlobDataProofArgs() if err != nil { return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) } @@ -344,23 +347,23 @@ func (b *DABatch) BlobDataProofForPointEvaluation() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *DABatch) Blob() *kzg4844.Blob { +func (b *DABatchV4) Blob() *kzg4844.Blob { return b.blob } // BlobBytes returns the blob bytes of the batch. -func (b *DABatch) BlobBytes() []byte { +func (b *DABatchV4) BlobBytes() []byte { return b.blobBytes } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) +func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableCompress { + if o.isCompressEnabled() { blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -369,17 +372,17 @@ func EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress bool) (uint64, uint64, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) +func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } var blobBytesLength uint64 - if enableCompress { + if o.isCompressEnabled() { blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) if err != nil { return 0, 0, err @@ -388,12 +391,12 @@ func EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch, enableCompress } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. 
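In both estimators the extra byte in blobBytesLength is the compression flag that V4 prepends to the blob payload; CalculatePaddedBlobSize then rounds the result up to the blob's field-element layout. A hedged sketch of toggling the flag around one batch (error handling elided; import fmt):

func estimateBothWays(codec *encoding.DACodecV4, b *encoding.Batch) {
	codec.SetCompression(false)
	raw, blobUncompressed, _ := codec.EstimateBatchL1CommitBatchSizeAndBlobSize(b)
	// uncompressed: padded size of 1 flag byte + the raw payload

	codec.SetCompression(true)
	_, blobCompressed, _ := codec.EstimateBatchL1CommitBatchSizeAndBlobSize(b)
	// compressed: padded size of 1 flag byte + the zstd output

	fmt.Println(raw, blobUncompressed, blobCompressed)
}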
-func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, MaxNumChunks) +func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -401,7 +404,7 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { if err != nil { return false, err } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -409,8 +412,8 @@ func CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { - batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, MaxNumChunks) +func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -418,7 +421,7 @@ func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { if err != nil { return false, err } - if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -426,21 +429,35 @@ func CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) uint64 { - return codecv3.EstimateChunkL1CommitCalldataSize(c) +func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { + return (&DACodecV3{}).EstimateChunkL1CommitCalldataSize(c) } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func EstimateBatchL1CommitCalldataSize(b *encoding.Batch) uint64 { - return codecv3.EstimateBatchL1CommitCalldataSize(b) +func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { + return (&DACodecV3{}).EstimateBatchL1CommitCalldataSize(b) } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func EstimateChunkL1CommitGas(c *encoding.Chunk) uint64 { - return codecv3.EstimateChunkL1CommitGas(c) +func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { + return (&DACodecV3{}).EstimateChunkL1CommitGas(c) } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func EstimateBatchL1CommitGas(b *encoding.Batch) uint64 { - return codecv3.EstimateBatchL1CommitGas(b) +func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { + return (&DACodecV3{}).EstimateBatchL1CommitGas(b) +} + +// isCompressEnabled checks if compression is enabled. 
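Note that both compatibility checks report an incompatible payload as (false, nil) rather than as an error, so a caller can use them to choose the compression flag before building a batch. A sketch of that gating pattern on the caller side (assumed usage, not shown in this patch):

func buildWithGatedCompression(codec *encoding.DACodecV4, b *encoding.Batch) (encoding.DABatch, error) {
	compatible, err := codec.CheckBatchCompressedDataCompatibility(b)
	if err != nil {
		return nil, err // constructing the payload itself failed
	}
	// Compress only when the zstd output passes the compatibility check.
	codec.SetCompression(compatible)
	return codec.NewDABatch(b)
}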
+func (o *DACodecV4) isCompressEnabled() bool { + return atomic.LoadUint32(&o.enableCompress) == 1 +} + +// SetCompression enables or disables compression. +func (o *DACodecV4) SetCompression(enable bool) { + if enable { + atomic.StoreUint32(&o.enableCompress, 1) + } else { + atomic.StoreUint32(&o.enableCompress, 0) + } } diff --git a/encoding/codecv4/codecv4_test.go b/encoding/codecv4/codecv4_test.go deleted file mode 100644 index a824c64..0000000 --- a/encoding/codecv4/codecv4_test.go +++ /dev/null @@ -1,837 +0,0 @@ -package codecv4 - -import ( - "encoding/hex" - "encoding/json" - "os" - "testing" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/core/types" - "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/scroll-tech/da-codec/encoding" - "github.com/scroll-tech/da-codec/encoding/codecv0" -) - -func TestCodecV4BlockEncode(t *testing.T) { - block := &DABlock{} - encoded := hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - block, err := NewDABlock(trace2, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - block, err = NewDABlock(trace3, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "00000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - block, err = NewDABlock(trace4, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - block, err = NewDABlock(trace5, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - block, err = NewDABlock(trace6, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - block, err = NewDABlock(trace7, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(block.Encode()) - assert.Equal(t, "000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) - - // sanity check: v0 and v4 block encodings are identical - for _, trace := range []*encoding.Block{trace2, trace3, trace4, trace5, trace6, trace7} { - blockv0, err := codecv0.NewDABlock(trace, 0) - assert.NoError(t, err) - encodedv0 := hex.EncodeToString(blockv0.Encode()) - - blockv4, err := NewDABlock(trace, 0) - 
assert.NoError(t, err) - encodedv4 := hex.EncodeToString(blockv4.Encode()) - - assert.Equal(t, encodedv0, encodedv4) - } -} - -func TestCodecV4ChunkEncode(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: [][]*types.TransactionData{nil}} - encoded := hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - // transactions are not part of the encoding - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType}, &types.TransactionData{Type: types.DynamicFeeTxType}) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err := NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000020000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "0100000000000000030000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000000d00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a1200000c000b", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200002a002a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a1200000a000a", encoded) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - encoded = hex.EncodeToString(chunk.Encode()) - assert.Equal(t, "01000000000000001100000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120001010101", encoded) -} - -func TestCodecV4ChunkHash(t *testing.T) { - // chunk with a single empty block - block := DABlock{} - chunk := &DAChunk{Blocks: []*DABlock{&block}, Transactions: 
[][]*types.TransactionData{nil}} - hash, err := chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x7cdb9d7f02ea58dfeb797ed6b4f7ea68846e4f2b0e30ed1535fc98b60c4ec809", hash.Hex()) - - // L1 transactions are part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // L2 transactions are not part of the hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.DynamicFeeTxType, TxHash: "0x0000000000000000000000000000000000000000000000000000000000000000"}) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // numL1Messages are not part of the hash - chunk.Blocks[0].NumL1Messages = 1 - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xdcb42a70c54293e75a19dd1303d167822182d78b361dd7504758c35e516871b2", hash.Hex()) - - // invalid hash - chunk.Transactions[0] = append(chunk.Transactions[0], &types.TransactionData{Type: types.L1MessageTxType, TxHash: "0xg"}) - _, err = chunk.Hash() - assert.Error(t, err) - - trace := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - originalChunk := &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x820f25d806ddea0ccdbfa463ee480da5b6ea3906e8a658417fb5417d0f837f5c", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_03.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x4620b3900e8454133448b677cbb2054c5dd61d467d7ebf752bfb12cffff90f40", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_04.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x059c6451e83012b405c7e1a38818369012a4a1c87d7d699366eac946d0410d73", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_05.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x854fc3136f47ce482ec85ee3325adfa16a1a1d60126e1c119eaaf0c3a9e90f8e", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_06.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0x2aa220ca7bd1368e59e8053eb3831e30854aa2ec8bd3af65cee350c1c0718ba6", hash.Hex()) - - trace = readBlockFromJSON(t, "../testdata/blockTrace_07.json") - originalChunk = &encoding.Chunk{Blocks: []*encoding.Block{trace}} - chunk, err = NewDAChunk(originalChunk, 0) - assert.NoError(t, err) - hash, err = chunk.Hash() - assert.NoError(t, err) - assert.Equal(t, "0xb65521bea7daff75838de07951c3c055966750fb5a270fead5e0e727c32455c3", hash.Hex()) -} - -func 
TestCodecV4BatchEncode(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV4)} - encoded := hex.EncodeToString(batch.Encode()) - assert.Equal(t, "04000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", encoded) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enble encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000000000000000000000009f81f6879f121da5b7a37535cdb21b3d53099266de57b1fdf603ce32100ed54101e5c897e0f98f6addd6c99bb51ff927cde93851b0d407aae3d7d5de75a31f2900000000000000000000000000000000000000000000000000000000000000000000000063807b2a26451ed31542ed15543973f8bc8c3b6382ba0cba5650a7faf14625377029203c1b6db22aa24613cb68dee10ca50bbbc88fc15b8a6abf9dcf3ad382a2642e480d", encoded) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "04000000000000000000000000000000000000000000000000d46d19f6d48083dc7905a68e6a20ea6a8fbcd445d56b549b324a8485b5b574a601ad8c8eee24cc98ab1ca9c0a4c92bf20f488f06dedbc22f1312bd389df7105000000000000000000000000000000000000000000000000000000000000000000000000063807b2d30702c0ea39553a0601a9c6fc5b27c076ddfc1044001fb0a8ad1fd9016304a61233de2770e0fb9a5578e5f633846ef9fa4c2ab8b80b8f9a30f09be07cda8d725", encoded) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000000b000000000000000bcaece1705bf2ce5e94154469d910ffe8d102419c5eb3152c0c6d237cf35c885f01c6a9a7d06425dbfad42697e4ce5bc8562d7c5ffe1f62d57fcb51240e33af93000000000000000000000000000000000000000000000000000000000000000000000000646b6e1338122423f3cebb92645f9ac93c8ee50edb75ea93a951f278007e721a7b9f995824895b00195499dfe77d201cf3627050d866abb2685f87e10466c4fcaf3a8588", encoded) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, 
"040000000000000000000000000000002a000000000000002a93255aa24dd468c5645f1e6901b8131a7a78a0eeb2a17cbb09ba64688a8de6b4016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000000a000000000000000ac7bcc8da943dd83404e84d9ce7e894ab97ce4829df4eb51ebbbe13c90b5a3f4d016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "04000000000000000000000000000001010000000000000101899a411a3309c6491701b7b955c7b1115ac015414bbb71b59a0ca561668d5208016ac24dabb9e1bbb3ec3c65b50a829564c2f56160ba92fbdb03ed7e4a0c439a000000000000000000000000000000000000000000000000000000000000000000000000646b6ed004e124536a56f650b0994e58647e59087bf99ecadbd7bc730ad6290f229fb0715885a06aad250ef3594c65a7a6a0e282175b1ad4d8b4063dac48e282bb5a9213", encoded) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000002a000000000000002ae7740182b0948139505b6b296d0c6c6f7717708323e6e687917acad823b559d80113ba3d5c53a035f4b4ec6f8a2ba9ab521bccab9f90e3a713ab5fffc0adec57000000000000000000000000000000000000000000000000000000000000000000000000646b6ed012e49b70b64652e5cab5dfdd1f58958d863de1d7fcb959e09f147a98b0b895171560f81b17ec3a2fe1c8ed2d308ca5bf002d7e3c18db9682a8d0f5379bf213aa", encoded) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enble encode */) - assert.NoError(t, err) - encoded = hex.EncodeToString(batch.Encode()) - assert.Equal(t, "040000000000000000000000000000002a000000000000002a9b0f37c563d27d9717ab16d47075df996c54fe110130df6b11bfd7230e1347670121388d141bd439af8447db5d00bacbfe1587fea6581f795e98588d95ba7f26000000000000000000000000000000000000000000000000000000000000000000000000646b6ed046aedf214a661b6b37b9c325fef4484ff3613a6fb52719609bf02a66bc7ba23b6e9b7bcbe3be0ba95654f16f715bf7e39ef87a84199340423f6487cf56058085", encoded) -} - -func TestCodecV4BatchHash(t *testing.T) { - // empty batch - batch := &DABatch{Version: uint8(encoding.CodecV4)} - assert.Equal(t, "0xdaf0827d02b32d41458aea0d5796dd0072d0a016f9834a2cb1a964d2c6ee135c", 
batch.Hash().Hex()) - - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x53d6da35c9b6f0413b6ebb80f4a8c19b0e3279481ddf602398a54d3b4e5d4f2c", batch.Hash().Hex()) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x08feefdb19215bb0f51f85a3b02a0954ac7da67681e274db49b9102f4c6e0857", batch.Hash().Hex()) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0xc56c5e51993342232193d1d93124bae30a5b1444eebf49b2dd5f2c5962d4d54d", batch.Hash().Hex()) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x2c32177c8b4c6289d977361c7fd0f1a6ea15add64da2eb8caf0420ac9b35231e", batch.Hash().Hex()) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x909bebbebdbf5ba9c85c6894e839c0b044d2878c457c4942887e3d64469ad342", batch.Hash().Hex()) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x53765a37bbd72655df586b530d79cb4ad0fb814d72ddc95e01e0ede579f45117", batch.Hash().Hex()) - - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk3, chunk4, chunk5}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x74ccf9cc265f423cc6e6e53ed294000637a832cdc93c76485855289bebb6764a", batch.Hash().Hex()) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, "0x8d5ee00a80d7dbdc083d0cdedd35c2cb722e5944f9d88f7450c9186f3ef3da44", batch.Hash().Hex()) -} - -func TestCodecV4ChunkAndBatchCommitGasEstimation(t *testing.T) { - block2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}} - chunk2Gas := EstimateChunkL1CommitGas(chunk2) - assert.Equal(t, uint64(51124), chunk2Gas) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2Gas := EstimateBatchL1CommitGas(batch2) -
assert.Equal(t, uint64(207649), batch2Gas) - - block3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{block3}} - chunk3Gas := EstimateChunkL1CommitGas(chunk3) - assert.Equal(t, uint64(51124), chunk3Gas) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3Gas := EstimateBatchL1CommitGas(batch3) - assert.Equal(t, uint64(207649), batch3Gas) - - block4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk4Gas := EstimateChunkL1CommitGas(chunk4) - assert.Equal(t, uint64(53745), chunk4Gas) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4Gas := EstimateBatchL1CommitGas(batch4) - assert.Equal(t, uint64(210302), batch4Gas) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{block2, block3}} - chunk5Gas := EstimateChunkL1CommitGas(chunk5) - assert.Equal(t, uint64(52202), chunk5Gas) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{block4}} - chunk6Gas := EstimateChunkL1CommitGas(chunk6) - assert.Equal(t, uint64(53745), chunk6Gas) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5Gas := EstimateBatchL1CommitGas(batch5) - assert.Equal(t, uint64(213087), batch5Gas) -} - -func repeat(element byte, count int) string { - result := make([]byte, 0, count) - for i := 0; i < count; i++ { - result = append(result, element) - } - return "0x" + common.Bytes2Hex(result) -} - -func TestCodecV4BatchStandardTestCases(t *testing.T) { - // Taking into consideration compression, we allow up to 5x of max blob bytes. - // We then ignore the metadata rows for 45 chunks. - maxChunks := 45 - nRowsData := 5*126976 - (maxChunks*4 + 2) - - for _, tc := range []struct { - chunks [][]string - expectedz string - expectedy string - expectedBlobVersionedHash string - expectedBatchHash string - }{ - // single empty chunk - {chunks: [][]string{{}}, expectedz: "1517a7f04a9f2517aaad8440792de202bd1fef70a861e12134c882ccf0c5a537", expectedy: "1ff0c5ea938308566ab022bc30d0136792084dc9adca93612ec925411915d4a9", expectedBlobVersionedHash: "015f16731c3e7864a08edae95f11db8c96e39a487427d7e58b691745d87f8a21", expectedBatchHash: "c3cfeead404a6de1ec5feaa29b6c1c1a5e6a40671c5d5e9cf1dd86fdf5a2e44a"}, - // single non-empty chunk - {chunks: [][]string{{"0x010203"}}, expectedz: "2cbd5fb174611060e72a2afcc385cea273b0f5ea8656f04f3661d757a6b00ff9", expectedy: "68d653e973d32fc5b79763d1b7de1699f37e2527830331b1a02f39d58d7070a9", expectedBlobVersionedHash: "019de38b4472451c5e8891dbb01bc2e834d660198cb9878e6b94fb55e4aaf92b", expectedBatchHash: "41e1c4a5220feb7fed5ba9e3980d138b8d5b4b06b8a46a87d796dbf5ed9265f5"}, - // multiple empty chunks - {chunks: [][]string{{}, {}}, expectedz: "0f9270fd0f21c1eef46334614c586759a2fb71ae46fef50560e92ef7ec926ccc", expectedy: "028f18fc74210d214d3e78a5f92f5c68a9d4dcc633e6e7ffb4144651a39b9dce", expectedBlobVersionedHash: "014a46e5be597971d313e300a052dc406b9f06fad394e1ba115df7da9ca5746d", expectedBatchHash: "94cac32609ae6c3d99dacf5af3650a7748b4dcf8c9779353b932a75e85bc2632"}, - // multiple non-empty chunks - {chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "3a199bd64627e67c320add8a5932870535c667236eda365c989f0b73176bb000", expectedy: "221d60db4912e9067df77ee3d71587ea1023ec0238c23044a3325f909fd5ceb3", expectedBlobVersionedHash: "0145df6dbf8070bb3137156fe4540c11330e84487fcac24239442859d95e925c", expectedBatchHash: "d2332749a82a3b94766493ee3826074b8af74efc98367d14fd82e1056e2abf88"}, - // empty chunk followed by 
non-empty chunk - {chunks: [][]string{{}, {"0x010203"}}, expectedz: "0a421d448784eb111c2ae9a8031a7cf79e4638b300c48d0c7ff38322e25268fc", expectedy: "48ad5516b1370ac6be17a1d3220e286c9522366ec36fc66a584bbe1ee904eaf1", expectedBlobVersionedHash: "019e5c4c0bfa68324657a0d2e49075eeee2e7c928811bc9c8b2c03888d9d3a5d", expectedBatchHash: "5eac258323d1a4d166d2d116b330262440f46f1ecf07b247cc792bca4a905761"}, - // non-empty chunk followed by empty chunk - {chunks: [][]string{{"0x070809"}, {}}, expectedz: "6aa26c5d595fa1b72c4e1aa4f06b35788060a7504137c7dd6896486819445230", expectedy: "72c082827841ab84576b49cd63bd06af07cb090626ea3e91a8e77de29b3e61dc", expectedBlobVersionedHash: "0166c93797bf7d4e5701d36bfc8bcea5270c1c4ff18d1aaa248125c87746cf3d", expectedBatchHash: "03e0bdf053fa21d37bf55ac27e7774298b95465123c353e30761e51965269a10"}, - // max number of chunks all empty - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "4a04cb1860de2c0d03a78520da62a447ef2af92e36dc0b1806db501d7cf63469", expectedy: "17ca30439aed3d9a96f4336d2a416da04a0803667922c7b0765557bb0162493f", expectedBlobVersionedHash: "014b8172c9e2ef89ac8d2ff0c9991baafff3602459250f5870721ac4f05dca09", expectedBatchHash: "216add0492703b12b841ebf6d217a41d1907dd4acd54d07a870472d31d4fde0d"}, - // max number of chunks all non-empty - {chunks: [][]string{ - {"0x0a"}, - {"0x0a0b"}, - {"0x0a0b0c"}, - {"0x0a0b0c0d"}, - {"0x0a0b0c0d0e"}, - {"0x0a0b0c0d0e0f"}, - {"0x0a0b0c0d0e0f10"}, - {"0x0a0b0c0d0e0f1011"}, - {"0x0a0b0c0d0e0f101112"}, - {"0x0a0b0c0d0e0f10111213"}, - {"0x0a0b0c0d0e0f1011121314"}, - {"0x0a0b0c0d0e0f101112131415"}, - {"0x0a0b0c0d0e0f10111213141516"}, - {"0x0a0b0c0d0e0f1011121314151617"}, - {"0x0a0b0c0d0e0f101112131415161718"}, - {"0x0a0b0c0d0e0f10111213141516171819"}, - {"0x0a0b0c0d0e0f101112131415161718191a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f2021222324252627"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212223242526272829"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031"}, - 
{"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031323334"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435"}, - {"0x0a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f30313233343536"}, - }, expectedz: "53eafb50809b3473cb4f8764f7e5d598af9eaaddc45a5a6da7cddac3380e39bb", expectedy: "40751ed98861f5c2058b4062b275f94a3d505a3221f6abe8dbe1074a4f10d0f4", expectedBlobVersionedHash: "01b78b07dbe03b960cd73ea45088b231a50ce88408fa938765e971c5dc7bbb6b", expectedBatchHash: "257175785213c68b10bb94396b657892fb7ae70708bf98ce357752906a80a6f0"}, - // single chunk blob full - {chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "37ca5366d9f5ddd9471f074f8019050ea6a13097368e84f298ffa1bd806ad851", expectedy: "5aa602da97cc438a039431c799b5f97467bcd45e693273dd1215f201b19fa5bd", expectedBlobVersionedHash: "01e531e7351a271839b2ae6ddec58818efd5f426fd6a7c0bc5c33c9171ed74bf", expectedBatchHash: "d3809d6b2fd10a62c6c58f9e7c32772f4ac062a78d363f46cd3ee301e87dbad2"}, - // multiple chunks blob full - {chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "250fc907e7ba3b5affb90a624566e337b02dd89a265677571cc0d1c51b60af19", expectedy: "1b2898bb001d962717159f49b015ae7228b21e9a590f836be0d79a0870c7d82b", expectedBlobVersionedHash: "01f3c431a72bbfd43c42dbd638d7f6d109be2b9449b96386b214f92b9e28ccc4", expectedBatchHash: "a51631991f6210b13e9c8ac9260704cca29fdc08adcfbd210053dc77c956e82f"}, - // max number of chunks only last one non-empty not full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "6ba09c6123b374f1828ce5b3e52c69ac7e2251f1a573ba4d51e71b386eef9c38", expectedy: "3104f9e81ecf4ade3281cc8ea68c4f451341388e2a2c84be4b5e5ed938b6bb26", expectedBlobVersionedHash: "017813036e3c57d5259d5b1d89ca0fe253e43d740f5ee287eabc916b3486f15d", expectedBatchHash: "ebfaf617cc91d9147b00968263993f70e0efc57c1189877092a87ea60b55a2d7"}, - // max number of chunks only last one non-empty full blob - {chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "295f6ba39b866f6635a1e11ffe16badf42174ba120bdcb973806620370f665fc", expectedy: "553772861d517aefd58332d87d75a388523b40dbd69c1d73b7d78fd18d895513", expectedBlobVersionedHash: "013a5cb4a098dfa068b82acea202eac5c7b1ec8f16c7cb37b2a9629e7359a4b1", expectedBatchHash: "b4c58eb1be9b2b21f6a43b4170ee92d6ee0af46e20848fff508a07d40b2bac29"}, - // max number of chunks but last is empty - {chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 
100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "4affa105e7c5d72a3223482b237296fead99e6d716b97bab0cb3447f93309692", expectedy: "4a850a8c7b84d568d8505121c92ebf284e88aa7a881290cf3939d52040871e56", expectedBlobVersionedHash: "01d3ce566fbdbcab307095bdc05de7bc2905d25f3dd4453b0f7d5f7ba8da9f08", expectedBatchHash: "ac29c2e8c26749cf99fca994cde6d33147e9e9aa60f162c964720b4937cae8fb"}, - } { - chunks := []*encoding.Chunk{} - - for _, c := range tc.chunks { - block := &encoding.Block{Transactions: []*types.TransactionData{}} - - for _, data := range c { - tx := &types.TransactionData{Type: 0xff, Data: data} - block.Transactions = append(block.Transactions, tx) - } - - chunk := &encoding.Chunk{Blocks: []*encoding.Block{block}} - chunks = append(chunks, chunk) - } - - blob, blobVersionedHash, z, _, err := ConstructBlobPayload(chunks, true /* enable encode */, true /* use mock */) - require.NoError(t, err) - actualZ := hex.EncodeToString(z[:]) - assert.Equal(t, tc.expectedz, actualZ) - assert.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) - - _, y, err := kzg4844.ComputeProof(blob, *z) - require.NoError(t, err) - actualY := hex.EncodeToString(y[:]) - assert.Equal(t, tc.expectedy, actualY) - - // Note: this is a dummy dataHash (for each chunk, we use 0xff00..0000) - dataBytes := make([]byte, 32*len(chunks)) - for i := range chunks { - copy(dataBytes[32*i:32*i+32], []byte{255 - uint8(i), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) - } - dataHash := crypto.Keccak256Hash(dataBytes) - - batch := DABatch{ - Version: uint8(encoding.CodecV4), - BatchIndex: 6789, - L1MessagePopped: 101, - TotalL1MessagePopped: 10101, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: common.BytesToHash([]byte{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}), - LastBlockTimestamp: 192837, - blob: blob, - z: z, - } - - batch.BlobDataProof, err = batch.blobDataProofForPICircuit() - require.NoError(t, err) - - assert.Equal(t, common.HexToHash(tc.expectedBatchHash), batch.Hash()) - } -} - -func TestCodecV4BatchL1MessagePopped(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch, err := NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 0, int(batch.L1MessagePopped)) - assert.Equal(t, 0, int(batch.TotalL1MessagePopped)) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch, err = NewDABatch(originalBatch, true /* enable 
encode */) - assert.NoError(t, err) - assert.Equal(t, 11, int(batch.L1MessagePopped)) // skip 10, include 1 - assert.Equal(t, 11, int(batch.TotalL1MessagePopped)) - - trace5 := readBlockFromJSON(t, "../testdata/blockTrace_05.json") - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 42, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 37 - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 5, int(batch.L1MessagePopped)) // skip 37, include 5 - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - trace6 := readBlockFromJSON(t, "../testdata/blockTrace_06.json") - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 10, int(batch.L1MessagePopped)) // skip 7, include 3 - assert.Equal(t, 10, int(batch.TotalL1MessagePopped)) - - trace7 := readBlockFromJSON(t, "../testdata/blockTrace_07.json") - chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}} - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}} - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 257, int(batch.L1MessagePopped)) // skip 255, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 1 - batch, err = NewDABatch(originalBatch, false /* disable encode */) - assert.NoError(t, err) - assert.Equal(t, 256, int(batch.L1MessagePopped)) // skip 254, include 2 - assert.Equal(t, 257, int(batch.TotalL1MessagePopped)) - - chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}} // queue index 10 - chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}} // queue index 37-41 - originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}} - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 42, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) - - originalBatch.TotalL1MessagePoppedBefore = 10 - batch, err = NewDABatch(originalBatch, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, 32, int(batch.L1MessagePopped)) - assert.Equal(t, 42, int(batch.TotalL1MessagePopped)) -} - -func TestCodecV4ChunkAndBatchBlobSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2BatchBytesSize, chunk2BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk2, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(412), chunk2BatchBytesSize) - assert.Equal(t, uint64(238), chunk2BlobSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2BatchBytesSize, batch2BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch2, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(412), batch2BatchBytesSize) - assert.Equal(t, uint64(238), batch2BlobSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: 
[]*encoding.Block{trace3}} - chunk3BatchBytesSize, chunk3BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk3, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), chunk3BatchBytesSize) - assert.Equal(t, uint64(2934), chunk3BlobSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3BatchBytesSize, batch3BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch3, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(5863), batch3BatchBytesSize) - assert.Equal(t, uint64(2934), batch3BlobSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4BatchBytesSize, chunk4BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk4, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk4BatchBytesSize) - assert.Equal(t, uint64(55), chunk4BlobSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - blob4BatchBytesSize, batch4BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch4, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(214), blob4BatchBytesSize) - assert.Equal(t, uint64(55), batch4BlobSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5BatchBytesSize, chunk5BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk5, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(6093), chunk5BatchBytesSize) - assert.Equal(t, uint64(3150), chunk5BlobSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6BatchBytesSize, chunk6BlobSize, err := EstimateChunkL1CommitBatchSizeAndBlobSize(chunk6, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(214), chunk6BatchBytesSize) - assert.Equal(t, uint64(55), chunk6BlobSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5BatchBytesSize, batch5BlobSize, err := EstimateBatchL1CommitBatchSizeAndBlobSize(batch5, true /* enable encode */) - assert.NoError(t, err) - assert.Equal(t, uint64(6125), batch5BatchBytesSize) - assert.Equal(t, uint64(3187), batch5BlobSize) -} - -func TestCodecV4ChunkAndBatchCalldataSizeEstimation(t *testing.T) { - trace2 := readBlockFromJSON(t, "../testdata/blockTrace_02.json") - chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{trace2}} - chunk2CalldataSize := EstimateChunkL1CommitCalldataSize(chunk2) - assert.Equal(t, uint64(60), chunk2CalldataSize) - batch2 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}} - batch2CalldataSize := EstimateBatchL1CommitCalldataSize(batch2) - assert.Equal(t, uint64(60), batch2CalldataSize) - - trace3 := readBlockFromJSON(t, "../testdata/blockTrace_03.json") - chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}} - chunk3CalldataSize := EstimateChunkL1CommitCalldataSize(chunk3) - assert.Equal(t, uint64(60), chunk3CalldataSize) - batch3 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}} - batch3CalldataSize := EstimateBatchL1CommitCalldataSize(batch3) - assert.Equal(t, uint64(60), batch3CalldataSize) - - trace4 := readBlockFromJSON(t, "../testdata/blockTrace_04.json") - chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk4CalldataSize := EstimateChunkL1CommitCalldataSize(chunk4) - assert.Equal(t, uint64(60), chunk4CalldataSize) - batch4 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}} - batch4CalldataSize := EstimateBatchL1CommitCalldataSize(batch4) - assert.Equal(t, 
uint64(60), batch4CalldataSize) - - chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3}} - chunk5CalldataSize := EstimateChunkL1CommitCalldataSize(chunk5) - assert.Equal(t, uint64(120), chunk5CalldataSize) - chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}} - chunk6CalldataSize := EstimateChunkL1CommitCalldataSize(chunk6) - assert.Equal(t, uint64(60), chunk6CalldataSize) - batch5 := &encoding.Batch{Chunks: []*encoding.Chunk{chunk5, chunk6}} - batch5CalldataSize := EstimateBatchL1CommitCalldataSize(batch5) - assert.Equal(t, uint64(180), batch5CalldataSize) -} - -func TestCodecV4DABatchJSONMarshalUnmarshal(t *testing.T) { - t.Run("Case 1", func(t *testing.T) { - jsonStr := `{ - "version": 4, - "batch_index": 293212, - "l1_message_popped": 7, - "total_l1_message_popped": 904750, - "data_hash": "0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450", - "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", - "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee", - "last_block_timestamp": 1721130505, - "blob_data_proof": [ - "0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e", - "0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(4), batch.Version) - assert.Equal(t, uint64(293212), batch.BatchIndex) - assert.Equal(t, uint64(7), batch.L1MessagePopped) - assert.Equal(t, uint64(904750), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xa261ff31f8f78c19f65d14d6394eb911d53a3a3add9a9691b211caa5809be450"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721130505), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x496b144866cffedfd71423639984bf0d9ad4309ff7e35693f1baef3cdaf1471e"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x5eba7d42db109bfa124d1bc4dbcb421944b8aae6eae13a9d55eb460ce402785b"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x64ba42153a4f642b2d8a37cf74a53067c37bba7389b85e7e07521f584e6b73d0") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 2", func(t *testing.T) { - jsonStr := `{ - "version": 5, - "batch_index": 123, - "l1_message_popped": 0, - "total_l1_message_popped": 0, - "parent_batch_hash": "0xabacadaeaf000000000000000000000000000000000000000000000000000000", - "last_block_timestamp": 1720174236, - "data_hash": "0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6", - "blob_versioned_hash": "0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1", - "blob_data_proof": [ - "0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899", - "0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(5), 
batch.Version) - assert.Equal(t, uint64(123), batch.BatchIndex) - assert.Equal(t, uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(0), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0xabacadaeaf000000000000000000000000000000000000000000000000000000"), batch.ParentBatchHash) - assert.Equal(t, uint64(1720174236), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0xa1a518fa8e636dcb736629c296ed10341536c4cf850a3bc0a808d8d66d7f1ee6"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x01c61b784ba4cd0fd398717fdc3470729d1a28d70632d520174c9e47614c80e1"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x1ee03153fd007529c214a68934b2cfd51e8586bd142e157564328946a0fc8899"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x118e196a9432c84c53db5a5a7bfbe13ef1ff8ffdba12fbccaf6360110eb71a10"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0xd14f142dbc5c384e9920d5bf82c6bbf7c98030ffd7a3cace6c8a6e9639a285f9") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) - - t.Run("Case 3", func(t *testing.T) { - jsonStr := `{ - "version": 4, - "batch_index": 293205, - "l1_message_popped": 0, - "total_l1_message_popped": 904737, - "data_hash": "0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf", - "blob_versioned_hash": "0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae", - "parent_batch_hash": "0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0", - "last_block_timestamp": 1721129563, - "blob_data_proof": [ - "0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54", - "0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da" - ] - }` - - var batch DABatch - err := json.Unmarshal([]byte(jsonStr), &batch) - require.NoError(t, err) - - assert.Equal(t, uint8(4), batch.Version) - assert.Equal(t, uint64(293205), batch.BatchIndex) - assert.Equal(t, uint64(0), batch.L1MessagePopped) - assert.Equal(t, uint64(904737), batch.TotalL1MessagePopped) - assert.Equal(t, common.HexToHash("0x053c0f8b8bea2f7f98dd9dcdc743f1059ca664b2b72a21381b7184dd8aa922e0"), batch.ParentBatchHash) - assert.Equal(t, uint64(1721129563), batch.LastBlockTimestamp) - assert.Equal(t, common.HexToHash("0x84786e890c015721a37f02a010bd2b84eaf4363cdf04831628a38ddbf497d0bf"), batch.DataHash) - assert.Equal(t, common.HexToHash("0x013c7e2c9ee9cd6511e8952e55ce5568832f8be3864de823d4ead5f6dfd382ae"), batch.BlobVersionedHash) - assert.Equal(t, common.HexToHash("0x519fb200d451fea8623ea1bdb15d8138cea68712792a92b9cf1f79dae6df5b54"), batch.BlobDataProof[0]) - assert.Equal(t, common.HexToHash("0x6d50a85330192c8e835cbd6bcdff0f2f23b0b3822e4e0319c92dafd70f0e21da"), batch.BlobDataProof[1]) - - batchHash := batch.Hash() - - expectedHash := common.HexToHash("0x19638ca802926b93946fe281666205958838d46172587d150ca4c720ae244cd3") - assert.Equal(t, expectedHash, batchHash, "Batch hash does not match expected value") - - // Marshal and Unmarshal test - data, err := json.Marshal(&batch) - require.NoError(t, err) - - var decodedBatch DABatch - err = json.Unmarshal(data, &decodedBatch) - require.NoError(t, err) - - assert.Equal(t, batch, decodedBatch) - }) -} - -func readBlockFromJSON(t *testing.T, filename string) *encoding.Block { - 
data, err := os.ReadFile(filename) - assert.NoError(t, err) - - block := &encoding.Block{} - assert.NoError(t, json.Unmarshal(data, block)) - return block -} diff --git a/encoding/da.go b/encoding/da.go index eb66b7c..b55f79e 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -19,26 +19,6 @@ var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80 // CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. const CalldataNonZeroByteGas = 16 -// CodecVersion defines the version of encoder and decoder. -type CodecVersion uint8 - -const ( - // CodecV0 represents the version 0 of the encoder and decoder. - CodecV0 CodecVersion = iota - - // CodecV1 represents the version 1 of the encoder and decoder. - CodecV1 - - // CodecV2 represents the version 2 of the encoder and decoder. - CodecV2 - - // CodecV3 represents the version 3 of the encoder and decoder. - CodecV3 - - // CodecV4 represents the version 4 of the encoder and decoder. - CodecV4 -) - // Block represents an L2 block. type Block struct { Header *types.Header @@ -461,3 +441,11 @@ func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { memoryCost := (memorySizeWord*memorySizeWord)/512 + (3 * memorySizeWord) return memoryCost } + +func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) { + rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) + if err != nil { + return 0, err + } + return uint64(len(rlpTxData)), nil +} diff --git a/encoding/encoding.go b/encoding/encoding.go index db3b027..8d165eb 100644 --- a/encoding/encoding.go +++ b/encoding/encoding.go @@ -1,6 +1,8 @@ package encoding import ( + "fmt" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) @@ -13,7 +15,7 @@ type DABlock interface { // DAChunk groups consecutive DABlocks with their transactions. type DAChunk interface { - Encode() []byte + Encode() ([]byte, error) Hash() (common.Hash, error) } @@ -33,17 +35,43 @@ type Codec interface { NewDABatch(*Batch) (DABatch, error) NewDABatchFromBytes([]byte) (DABatch, error) - ComputeBatchDataHash([]*Chunk, uint64) (common.Hash, error) - ConstructBlobPayload([]*Chunk, bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) - EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) CheckBatchCompressedDataCompatibility(*Batch) (bool, error) - EstimateChunkL1CommitCalldataSize(*Chunk) uint64 - EstimateChunkL1CommitGas(*Chunk) uint64 - EstimateBatchL1CommitGas(*Batch) uint64 - EstimateBatchL1CommitCalldataSize(*Batch) uint64 + EstimateChunkL1CommitCalldataSize(*Chunk) (uint64, error) + EstimateChunkL1CommitGas(*Chunk) (uint64, error) + EstimateBatchL1CommitGas(*Batch) (uint64, error) + EstimateBatchL1CommitCalldataSize(*Batch) (uint64, error) SetCompression(enable bool) // only used for codecv4 } + +// CodecVersion represents the version of the codec. +type CodecVersion int + +const ( + CodecV0 CodecVersion = iota + CodecV1 + CodecV2 + CodecV3 + CodecV4 +) + +// GetCodec returns the appropriate codec for the given version. 
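+//
+// A minimal usage sketch (illustrative only, not part of this change; `b` is
+// assumed to be a *Batch already built by the caller):
+//
+//	codec, err := GetCodec(CodecV2)
+//	if err != nil {
+//		return err // unsupported codec version
+//	}
+//	daBatch, err := codec.NewDABatch(b)
+//	if err != nil {
+//		return err
+//	}
+//	batchHash := daBatch.Hash()
+//	_ = batchHash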
+func GetCodec(version CodecVersion) (Codec, error) { + switch version { + case CodecV0: + return &DACodecV0{}, nil + case CodecV1: + return &DACodecV1{}, nil + case CodecV2: + return &DACodecV2{}, nil + case CodecV3: + return &DACodecV3{}, nil + case CodecV4: + return &DACodecV4{}, nil + default: + return nil, fmt.Errorf("unsupported codec version: %d", version) + } +} diff --git a/encoding/zstd/zstd.go b/encoding/zstd/zstd.go index 58eab2b..feab982 100644 --- a/encoding/zstd/zstd.go +++ b/encoding/zstd/zstd.go @@ -5,6 +5,7 @@ package zstd char* compress_scroll_batch_bytes(uint8_t* src, uint64_t src_size, uint8_t* output_buf, uint64_t *output_buf_size); */ import "C" + import ( "fmt" "unsafe" From 879bb98de49ca4792ddaf0ca1c19de1f1560024f Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 20:44:22 +0800 Subject: [PATCH 16/46] add dablock.go --- encoding/codecv0.go | 57 +++++--------------------------------------- encoding/codecv1.go | 13 +++------- encoding/codecv2.go | 5 +--- encoding/codecv3.go | 5 +--- encoding/codecv4.go | 5 +--- encoding/dablock.go | 47 ++++++++++++++++++++++++++++++++++++ encoding/encoding.go | 6 ----- 7 files changed, 59 insertions(+), 79 deletions(-) create mode 100644 encoding/dablock.go diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 9a92879..3cceb18 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -6,7 +6,6 @@ import ( "errors" "fmt" "math" - "math/big" "strings" "github.com/scroll-tech/go-ethereum/common" @@ -17,19 +16,9 @@ import ( type DACodecV0 struct{} -// DABlock represents a Data Availability Block. -type DABlockV0 struct { - BlockNumber uint64 - Timestamp uint64 - BaseFee *big.Int - GasLimit uint64 - NumTransactions uint16 - NumL1Messages uint16 -} - // DAChunk groups consecutive DABlocks with their transactions. type DAChunkV0 struct { - Blocks []*DABlockV0 + Blocks []*DABlock Transactions [][]*types.TransactionData } @@ -45,7 +34,7 @@ type DABatchV0 struct { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -63,7 +52,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := DABlockV0{ + daBlock := &DABlock{ BlockNumber: block.Header.Number.Uint64(), Timestamp: block.Header.Time, BaseFee: block.Header.BaseFee, @@ -72,42 +61,12 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) NumL1Messages: uint16(numL1Messages), } - return &daBlock, nil -} - -// Encode serializes the DABlock into a slice of bytes. -func (b *DABlockV0) Encode() []byte { - bytes := make([]byte, 60) - binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) - binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) - if b.BaseFee != nil { - binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64()) - } - binary.BigEndian.PutUint64(bytes[48:], b.GasLimit) - binary.BigEndian.PutUint16(bytes[56:], b.NumTransactions) - binary.BigEndian.PutUint16(bytes[58:], b.NumL1Messages) - return bytes -} - -// Decode populates the fields of a DABlock from a byte slice. 
-func (b *DABlockV0) Decode(bytes []byte) error { - if len(bytes) != 60 { - return errors.New("block encoding is not 60 bytes long") - } - - b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8]) - b.Timestamp = binary.BigEndian.Uint64(bytes[8:16]) - b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) - b.GasLimit = binary.BigEndian.Uint64(bytes[48:56]) - b.NumTransactions = binary.BigEndian.Uint16(bytes[56:58]) - b.NumL1Messages = binary.BigEndian.Uint16(bytes[58:60]) - - return nil + return daBlock, nil } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - var blocks []*DABlockV0 + var blocks []*DABlock var txs [][]*types.TransactionData if chunk == nil { @@ -127,11 +86,7 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) if err != nil { return nil, err } - blockData, ok := b.(*DABlockV0) - if !ok { - return nil, errors.New("failed to cast block data") - } - blocks = append(blocks, blockData) + blocks = append(blocks, b) totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) txs = append(txs, block.Transactions) } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 5a1d87d..0ac7d58 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -20,9 +20,6 @@ type DACodecV1 struct{} // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv1MaxNumChunks = 15 -// DABlockV1 represents a Data Availability Block. -type DABlockV1 = DABlockV0 - // DAChunkV1 groups consecutive DABlocks with their transactions. type DAChunkV1 DAChunkV0 @@ -44,7 +41,7 @@ type DABatchV1 struct { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) } @@ -58,7 +55,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []*DABlockV1 + var blocks []*DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { @@ -66,11 +63,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) if err != nil { return nil, err } - blockData, ok := b.(*DABlockV1) - if !ok { - return nil, errors.New("failed to cast block data") - } - blocks = append(blocks, blockData) + blocks = append(blocks, b) totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) txs = append(txs, block.Transactions) } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index e592304..02cb6f1 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -22,9 +22,6 @@ type DACodecV2 struct{} // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 -// DABlockV2 represents a Data Availability Block. -type DABlockV2 = DABlockV1 - // DAChunkV2 groups consecutive DABlocks with their transactions. type DAChunkV2 = DAChunkV1 @@ -46,7 +43,7 @@ type DABatchV2 struct { } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. 
-func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index c6fcc79..2eed735 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -16,9 +16,6 @@ type DACodecV3 struct{} // Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv3MaxNumChunks = 45 -// DABlockV3 represents a Data Availability Block. -type DABlockV3 = DABlockV2 - // DAChunkV3 groups consecutive DABlocks with their transactions. type DAChunkV3 = DAChunkV2 @@ -44,7 +41,7 @@ type DABatchV3 struct { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index d750127..0d2864e 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -25,9 +25,6 @@ type DACodecV4 struct { // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 -// DABlockV4 represents a Data Availability Block. -type DABlockV4 = DABlockV3 - // DAChunkV4 groups consecutive DABlocks with their transactions. type DAChunkV4 = DAChunkV3 @@ -53,7 +50,7 @@ type DABatchV4 struct { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/dablock.go b/encoding/dablock.go new file mode 100644 index 0000000..7ae69ab --- /dev/null +++ b/encoding/dablock.go @@ -0,0 +1,47 @@ +package encoding + +import ( + "encoding/binary" + "errors" + "math/big" +) + +// DABlock represents a Data Availability Block. +type DABlock struct { + BlockNumber uint64 + Timestamp uint64 + BaseFee *big.Int + GasLimit uint64 + NumTransactions uint16 + NumL1Messages uint16 +} + +// Encode serializes the DABlock into a slice of bytes. +func (b *DABlock) Encode() []byte { + bytes := make([]byte, 60) + binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) + binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) + if b.BaseFee != nil { + binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64()) + } + binary.BigEndian.PutUint64(bytes[48:], b.GasLimit) + binary.BigEndian.PutUint16(bytes[56:], b.NumTransactions) + binary.BigEndian.PutUint16(bytes[58:], b.NumL1Messages) + return bytes +} + +// Decode populates the fields of a DABlock from a byte slice. 
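+//
+// Round-trip sketch (values are illustrative). Note that a nil BaseFee is
+// encoded as zero, so it decodes back as a non-nil big.Int holding 0:
+//
+//	src := DABlock{BlockNumber: 1, Timestamp: 1700000000, GasLimit: 10_000_000}
+//	var dst DABlock
+//	if err := dst.Decode(src.Encode()); err != nil {
+//		// only returned when the input is not exactly 60 bytes long
+//	}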
+func (b *DABlock) Decode(bytes []byte) error { + if len(bytes) != 60 { + return errors.New("block encoding is not 60 bytes long") + } + + b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8]) + b.Timestamp = binary.BigEndian.Uint64(bytes[8:16]) + b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) + b.GasLimit = binary.BigEndian.Uint64(bytes[48:56]) + b.NumTransactions = binary.BigEndian.Uint16(bytes[56:58]) + b.NumL1Messages = binary.BigEndian.Uint16(bytes[58:60]) + + return nil +} diff --git a/encoding/encoding.go b/encoding/encoding.go index 8d165eb..1783439 100644 --- a/encoding/encoding.go +++ b/encoding/encoding.go @@ -7,12 +7,6 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -// DABlock represents a Data Availability Block. -type DABlock interface { - Encode() []byte - Decode([]byte) error -} - // DAChunk groups consecutive DABlocks with their transactions. type DAChunk interface { Encode() ([]byte, error) From 77aafd422158aad079253e621086fea6feab8fb0 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 21:26:56 +0800 Subject: [PATCH 17/46] add dachunk.go --- encoding/codecv0.go | 91 ------------------------ encoding/codecv1.go | 51 -------------- encoding/codecv2.go | 3 - encoding/codecv3.go | 3 - encoding/codecv4.go | 3 - encoding/dachunk.go | 161 +++++++++++++++++++++++++++++++++++++++++++ encoding/encoding.go | 2 +- 7 files changed, 162 insertions(+), 152 deletions(-) create mode 100644 encoding/dachunk.go diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 3cceb18..b0a57a4 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -2,11 +2,9 @@ package encoding import ( "encoding/binary" - "encoding/hex" "errors" "fmt" "math" - "strings" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -16,12 +14,6 @@ import ( type DACodecV0 struct{} -// DAChunk groups consecutive DABlocks with their transactions. -type DAChunkV0 struct { - Blocks []*DABlock - Transactions [][]*types.TransactionData -} - // DABatch contains metadata about a batch of DAChunks. type DABatchV0 struct { Version uint8 @@ -99,89 +91,6 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return &daChunk, nil } -// Encode serializes the DAChunk into a slice of bytes. -func (c *DAChunkV0) Encode() ([]byte, error) { - if len(c.Blocks) == 0 { - return nil, errors.New("number of blocks is 0") - } - - if len(c.Blocks) > 255 { - return nil, errors.New("number of blocks exceeds 1 byte") - } - - var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) - - var l2TxDataBytes []byte - - for _, block := range c.Blocks { - chunkBytes = append(chunkBytes, block.Encode()...) - } - - for _, blockTxs := range c.Transactions { - for _, txData := range blockTxs { - if txData.Type == types.L1MessageTxType { - continue - } - - var txLen [4]byte - rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) - if err != nil { - return nil, err - } - binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData))) - l2TxDataBytes = append(l2TxDataBytes, txLen[:]...) - l2TxDataBytes = append(l2TxDataBytes, rlpTxData...) - } - } - - chunkBytes = append(chunkBytes, l2TxDataBytes...) - return chunkBytes, nil -} - -// Hash computes the hash of the DAChunk data. 
-func (c *DAChunkV0) Hash() (common.Hash, error) { - chunkBytes, err := c.Encode() - if err != nil { - return common.Hash{}, err - } - - if len(chunkBytes) == 0 { - return common.Hash{}, errors.New("chunk data is empty and cannot be processed") - } - numBlocks := chunkBytes[0] - - // concatenate block contexts - var dataBytes []byte - for i := 0; i < int(numBlocks); i++ { - // only the first 58 bytes of each BlockContext are needed for the hashing process - dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...) - } - - // concatenate l1 and l2 tx hashes - for _, blockTxs := range c.Transactions { - var l1TxHashes []byte - var l2TxHashes []byte - for _, txData := range blockTxs { - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, fmt.Errorf("failed to decode tx hash from TransactionData: hash=%v, err=%w", txData.TxHash, err) - } - if txData.Type == types.L1MessageTxType { - l1TxHashes = append(l1TxHashes, hashBytes...) - } else { - l2TxHashes = append(l2TxHashes, hashBytes...) - } - } - dataBytes = append(dataBytes, l1TxHashes...) - dataBytes = append(dataBytes, l2TxHashes...) - } - - hash := crypto.Keccak256Hash(dataBytes) - return hash, nil -} - // NewDABatch creates a DABatch from the provided Batch. func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // compute batch data hash diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 0ac7d58..b4efc4d 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -7,7 +7,6 @@ import ( "errors" "fmt" "math/big" - "strings" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -20,9 +19,6 @@ type DACodecV1 struct{} // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv1MaxNumChunks = 15 -// DAChunkV1 groups consecutive DABlocks with their transactions. -type DAChunkV1 DAChunkV0 - // DABatchV1 contains metadata about a batch of DAChunks. type DABatchV1 struct { // header @@ -76,53 +72,6 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return &daChunk, nil } -// Encode serializes the DAChunk into a slice of bytes. -func (c *DAChunkV1) Encode() ([]byte, error) { - var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) - - for _, block := range c.Blocks { - blockBytes := block.Encode() - chunkBytes = append(chunkBytes, blockBytes...) - } - - return chunkBytes, nil -} - -// Hash computes the hash of the DAChunk data. -func (c *DAChunkV1) Hash() (common.Hash, error) { - var dataBytes []byte - - // concatenate block contexts - for _, block := range c.Blocks { - encodedBlock := block.Encode() - // only the first 58 bytes are used in the hashing process - dataBytes = append(dataBytes, encodedBlock[:58]...) - } - - // concatenate l1 tx hashes - for _, blockTxs := range c.Transactions { - for _, txData := range blockTxs { - if txData.Type != types.L1MessageTxType { - continue - } - - txHash := strings.TrimPrefix(txData.TxHash, "0x") - hashBytes, err := hex.DecodeString(txHash) - if err != nil { - return common.Hash{}, err - } - if len(hashBytes) != 32 { - return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) - } - dataBytes = append(dataBytes, hashBytes...) - } - } - - hash := crypto.Keccak256Hash(dataBytes) - return hash, nil -} - // NewDABatch creates a DABatch from the provided Batch. 
func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 02cb6f1..3c13824 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -22,9 +22,6 @@ type DACodecV2 struct{} // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 -// DAChunkV2 groups consecutive DABlocks with their transactions. -type DAChunkV2 = DAChunkV1 - // DABatch contains metadata about a batch of DAChunks. type DABatchV2 struct { // header diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 2eed735..1d577cd 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -16,9 +16,6 @@ type DACodecV3 struct{} // Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv3MaxNumChunks = 45 -// DAChunkV3 groups consecutive DABlocks with their transactions. -type DAChunkV3 = DAChunkV2 - // DABatchV3 contains metadata about a batch of DAChunks. type DABatchV3 struct { // header diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 0d2864e..69228a3 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -25,9 +25,6 @@ type DACodecV4 struct { // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 -// DAChunkV4 groups consecutive DABlocks with their transactions. -type DAChunkV4 = DAChunkV3 - // DABatchV4 contains metadata about a batch of DAChunks. type DABatchV4 struct { // header diff --git a/encoding/dachunk.go b/encoding/dachunk.go new file mode 100644 index 0000000..d4533df --- /dev/null +++ b/encoding/dachunk.go @@ -0,0 +1,161 @@ +package encoding + +import ( + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + "strings" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" +) + +// DAChunk groups consecutive DABlocks with their transactions. +type DAChunkV0 struct { + Blocks []*DABlock + Transactions [][]*types.TransactionData +} + +// Encode serializes the DAChunk into a slice of bytes. +func (c *DAChunkV0) Encode() ([]byte, error) { + if len(c.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(c.Blocks) > 255 { + return nil, errors.New("number of blocks exceeds 1 byte") + } + + var chunkBytes []byte + chunkBytes = append(chunkBytes, byte(len(c.Blocks))) + + var l2TxDataBytes []byte + + for _, block := range c.Blocks { + chunkBytes = append(chunkBytes, block.Encode()...) + } + + for _, blockTxs := range c.Transactions { + for _, txData := range blockTxs { + if txData.Type == types.L1MessageTxType { + continue + } + + var txLen [4]byte + rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) + if err != nil { + return nil, err + } + binary.BigEndian.PutUint32(txLen[:], uint32(len(rlpTxData))) + l2TxDataBytes = append(l2TxDataBytes, txLen[:]...) + l2TxDataBytes = append(l2TxDataBytes, rlpTxData...) + } + } + + chunkBytes = append(chunkBytes, l2TxDataBytes...) + return chunkBytes, nil +} + +// Hash computes the hash of the DAChunk data. 
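+//
+// Preimage layout (a sketch of what the implementation below hashes):
+//
+//	keccak256(
+//	    blockContext_0[0:58] || ... || blockContext_n[0:58]   // NumL1Messages (bytes 58:60) is excluded
+//	    || block_0 L1 message tx hashes || block_0 L2 tx hashes
+//	    || ...
+//	    || block_n L1 message tx hashes || block_n L2 tx hashes
+//	)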
+func (c *DAChunkV0) Hash() (common.Hash, error) { + chunkBytes, err := c.Encode() + if err != nil { + return common.Hash{}, err + } + + if len(chunkBytes) == 0 { + return common.Hash{}, errors.New("chunk data is empty and cannot be processed") + } + numBlocks := chunkBytes[0] + + // concatenate block contexts + var dataBytes []byte + for i := 0; i < int(numBlocks); i++ { + // only the first 58 bytes of each BlockContext are needed for the hashing process + dataBytes = append(dataBytes, chunkBytes[1+60*i:60*i+59]...) + } + + // concatenate l1 and l2 tx hashes + for _, blockTxs := range c.Transactions { + var l1TxHashes []byte + var l2TxHashes []byte + for _, txData := range blockTxs { + txHash := strings.TrimPrefix(txData.TxHash, "0x") + hashBytes, err := hex.DecodeString(txHash) + if err != nil { + return common.Hash{}, fmt.Errorf("failed to decode tx hash from TransactionData: hash=%v, err=%w", txData.TxHash, err) + } + if txData.Type == types.L1MessageTxType { + l1TxHashes = append(l1TxHashes, hashBytes...) + } else { + l2TxHashes = append(l2TxHashes, hashBytes...) + } + } + dataBytes = append(dataBytes, l1TxHashes...) + dataBytes = append(dataBytes, l2TxHashes...) + } + + hash := crypto.Keccak256Hash(dataBytes) + return hash, nil +} + +// DAChunkV1 groups consecutive DABlocks with their transactions. +type DAChunkV1 DAChunkV0 + +// Encode serializes the DAChunk into a slice of bytes. +func (c *DAChunkV1) Encode() ([]byte, error) { + var chunkBytes []byte + chunkBytes = append(chunkBytes, byte(len(c.Blocks))) + + for _, block := range c.Blocks { + blockBytes := block.Encode() + chunkBytes = append(chunkBytes, blockBytes...) + } + + return chunkBytes, nil +} + +// Hash computes the hash of the DAChunk data. +func (c *DAChunkV1) Hash() (common.Hash, error) { + var dataBytes []byte + + // concatenate block contexts + for _, block := range c.Blocks { + encodedBlock := block.Encode() + // only the first 58 bytes are used in the hashing process + dataBytes = append(dataBytes, encodedBlock[:58]...) + } + + // concatenate l1 tx hashes + for _, blockTxs := range c.Transactions { + for _, txData := range blockTxs { + if txData.Type != types.L1MessageTxType { + continue + } + + txHash := strings.TrimPrefix(txData.TxHash, "0x") + hashBytes, err := hex.DecodeString(txHash) + if err != nil { + return common.Hash{}, err + } + if len(hashBytes) != 32 { + return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) + } + dataBytes = append(dataBytes, hashBytes...) + } + } + + hash := crypto.Keccak256Hash(dataBytes) + return hash, nil +} + +// DAChunkV2 groups consecutive DABlocks with their transactions. +type DAChunkV2 = DAChunkV1 + +// DAChunkV3 groups consecutive DABlocks with their transactions. +type DAChunkV3 = DAChunkV2 + +// DAChunkV4 groups consecutive DABlocks with their transactions. +type DAChunkV4 = DAChunkV3 diff --git a/encoding/encoding.go b/encoding/encoding.go index 1783439..7bbdaac 100644 --- a/encoding/encoding.go +++ b/encoding/encoding.go @@ -24,7 +24,7 @@ type DABatch interface { // Codec represents the interface for encoding and decoding DA-related structures. 
type Codec interface { - NewDABlock(*Block, uint64) (DABlock, error) + NewDABlock(*Block, uint64) (*DABlock, error) NewDAChunk(*Chunk, uint64) (DAChunk, error) NewDABatch(*Batch) (DABatch, error) NewDABatchFromBytes([]byte) (DABatch, error) From 6ee5c197524e4273c5f49b7726765644a8e2b77e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 21:58:18 +0800 Subject: [PATCH 18/46] add dabatch.go --- encoding/codecv0.go | 46 -------- encoding/codecv1.go | 112 +------------------ encoding/codecv2.go | 104 ++---------------- encoding/codecv3.go | 130 +--------------------- encoding/codecv4.go | 130 ++-------------------- encoding/da.go | 26 +++++ encoding/dabatch.go | 255 ++++++++++++++++++++++++++++++++++++++++++++ 7 files changed, 299 insertions(+), 504 deletions(-) create mode 100644 encoding/dabatch.go diff --git a/encoding/codecv0.go b/encoding/codecv0.go index b0a57a4..7b0b5cd 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -9,22 +9,10 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) type DACodecV0 struct{} -// DABatch contains metadata about a batch of DAChunks. -type DABatchV0 struct { - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte -} - // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { if !block.Header.Number.IsUint64() { @@ -152,40 +140,6 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. -func (b *DABatchV0) Encode() []byte { - batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.ParentBatchHash[:]) - copy(batchBytes[89:], b.SkippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV0) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// Blob returns the blob of the batch. -func (b *DABatchV0) Blob() *kzg4844.Blob { - return nil -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV0) BlobBytes() []byte { - return nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { - return nil, nil -} - // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { var size uint64 diff --git a/encoding/codecv1.go b/encoding/codecv1.go index b4efc4d..0bc60a6 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -3,7 +3,6 @@ package encoding import ( "crypto/sha256" "encoding/binary" - "encoding/hex" "errors" "fmt" "math/big" @@ -19,23 +18,6 @@ type DACodecV1 struct{} // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. 
const Codecv1MaxNumChunks = 15 -// DABatchV1 contains metadata about a batch of DAChunks. -type DABatchV1 struct { - // header - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - BlobVersionedHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point -} - // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) @@ -84,7 +66,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -117,31 +99,6 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } -// ComputeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a batch in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV1) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - var dataBytes []byte - totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore - - for _, chunk := range chunks { - daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) - if err != nil { - return common.Hash{}, err - } - totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) - chunkHash, err := daChunk.Hash() - if err != nil { - return common.Hash{}, err - } - dataBytes = append(dataBytes, chunkHash.Bytes()...) - } - - dataHash := crypto.Keccak256Hash(dataBytes) - return dataHash, nil -} - // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) @@ -252,73 +209,6 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. -func (b *DABatchV1) Encode() []byte { - batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.BlobVersionedHash[:]) - copy(batchBytes[89:], b.ParentBatchHash[:]) - copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV1) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// BlobDataProof computes the abi-encoded blob verification data. 
-func (b *DABatchV1) BlobDataProof() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProof with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProof with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of ``_blobDataProof``: - // | z | y | kzg_commitment | kzg_proof | - // |---------|---------|----------------|-----------| - // | bytes32 | bytes32 | bytes48 | bytes48 | - - values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() - if err != nil { - return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) - } - return blobDataProofArgs.Pack(values...) -} - -// Blob returns the blob of the batch. -func (b *DABatchV1) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV1) BlobBytes() []byte { - return nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { - return nil, nil -} - // EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk. func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *Chunk) (uint64, error) { metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) // over-estimate: adding metadata length diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 3c13824..81b8897 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -22,23 +22,6 @@ type DACodecV2 struct{} // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 -// DABatch contains metadata about a batch of DAChunks. -type DABatchV2 struct { - // header - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - BlobVersionedHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point -} - // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) @@ -61,7 +44,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -73,7 +56,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, _, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, _, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -94,16 +77,8 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } -// ComputeBatchDataHash computes the data hash of the batch. 
-// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV2) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return (&DACodecV1{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// ConstructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV2) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +// constructBlobPayload constructs the 4844 blob payload. +func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv2MaxNumChunks*4 @@ -172,13 +147,13 @@ func (o *DACodecV2) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (* if !useMockTxData && len(batchBytes) > 131072 { // Check compressed data compatibility. if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } } if len(blobBytes) > 126976 { - log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } @@ -232,73 +207,6 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. -func (b *DABatchV2) Encode() []byte { - batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.BlobVersionedHash[:]) - copy(batchBytes[89:], b.ParentBatchHash[:]) - copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV2) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// BlobDataProof computes the abi-encoded blob verification data. 
-func (b *DABatchV2) BlobDataProof() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProof with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProof with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of ``_blobDataProof``: - // | z | y | kzg_commitment | kzg_proof | - // |---------|---------|----------------|-----------| - // | bytes32 | bytes32 | bytes48 | bytes48 | - - values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() - if err != nil { - return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) - } - return blobDataProofArgs.Pack(values...) -} - -// Blob returns the blob of the batch. -func (b *DABatchV2) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV2) BlobBytes() []byte { - return nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { - return nil, nil -} - // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 1d577cd..e60248b 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -2,12 +2,10 @@ package encoding import ( "encoding/binary" - "encoding/hex" "errors" "fmt" "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) @@ -16,27 +14,6 @@ type DACodecV3 struct{} // Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv3MaxNumChunks = 45 -// DABatchV3 contains metadata about a batch of DAChunks. -type DABatchV3 struct { - // header - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash common.Hash `json:"data_hash"` - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - ParentBatchHash common.Hash `json:"parent_batch_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point - - // for batch task - blobBytes []byte -} - // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) @@ -63,7 +40,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -75,7 +52,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -105,17 +82,9 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } -// ComputeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV3) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return (&DACodecV2{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// ConstructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV3) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - return (&DACodecV2{}).ConstructBlobPayload(chunks, useMockTxData) +// constructBlobPayload constructs the 4844 blob payload. +func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { + return (&DACodecV2{}).constructBlobPayload(chunks, useMockTxData) } // NewDABatchFromBytes decodes the given byte slice into a DABatch. @@ -143,95 +112,6 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. -func (b *DABatchV3) Encode() []byte { - batchBytes := make([]byte, 193) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) - copy(batchBytes[25:57], b.DataHash[:]) - copy(batchBytes[57:89], b.BlobVersionedHash[:]) - copy(batchBytes[89:121], b.ParentBatchHash[:]) - binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) - copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) - copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV3) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// blobDataProofForPICircuit computes the abi-encoded blob verification data. 
-func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { - if b.blob == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") - } - if b.z == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") - } - - _, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of result: - // | z | y | - // |---------|---------| - // | bytes32 | bytes32 | - var result [2]common.Hash - result[0] = common.BytesToHash(b.z[:]) - result[1] = common.BytesToHash(y[:]) - - return result, nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of ``_blobDataProof``: - // | z | y | kzg_commitment | kzg_proof | - // |---------|---------|----------------|-----------| - // | bytes32 | bytes32 | bytes48 | bytes48 | - - values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() - if err != nil { - return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) - } - return blobDataProofArgs.Pack(values...) -} - -// Blob returns the blob of the batch. -func (b *DABatchV3) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV3) BlobBytes() []byte { - return b.blobBytes -} - // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { return (&DACodecV2{}).EstimateChunkL1CommitBatchSizeAndBlobSize(c) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 69228a3..f4d1426 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -25,27 +25,6 @@ type DACodecV4 struct { // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 -// DABatchV4 contains metadata about a batch of DAChunks. -type DABatchV4 struct { - // header - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash common.Hash `json:"data_hash"` - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - ParentBatchHash common.Hash `json:"parent_batch_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` - - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point - - // for batch task - blobBytes []byte -} - // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) @@ -72,7 +51,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := o.ComputeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -84,7 +63,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // blob payload - blob, blobVersionedHash, z, blobBytes, err := o.ConstructBlobPayload(batch.Chunks, false /* no mock */) + blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) if err != nil { return nil, err } @@ -114,16 +93,8 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } -// ComputeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV4) ComputeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return (&DACodecV3{}).ComputeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// ConstructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV4) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +// constructBlobPayload constructs the 4844 blob payload. +func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv4MaxNumChunks*4 @@ -193,7 +164,7 @@ func (o *DACodecV4) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (* if !useMockTxData { // Check compressed data compatibility. if err = CheckCompressedDataCompatibility(blobBytes); err != nil { - log.Error("ConstructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } } @@ -203,7 +174,7 @@ func (o *DACodecV4) ConstructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } if len(blobBytes) > 126976 { - log.Error("ConstructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") } @@ -261,95 +232,6 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return b, nil } -// Encode serializes the DABatch into bytes. 
-func (b *DABatchV4) Encode() []byte { - batchBytes := make([]byte, 193) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) - copy(batchBytes[25:57], b.DataHash[:]) - copy(batchBytes[57:89], b.BlobVersionedHash[:]) - copy(batchBytes[89:121], b.ParentBatchHash[:]) - binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) - copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) - copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) - return batchBytes -} - -// Hash computes the hash of the serialized DABatch. -func (b *DABatchV4) Hash() common.Hash { - bytes := b.Encode() - return crypto.Keccak256Hash(bytes) -} - -// blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatchV4) blobDataProofForPICircuit() ([2]common.Hash, error) { - if b.blob == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") - } - if b.z == nil { - return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") - } - - _, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of result: - // | z | y | - // |---------|---------| - // | bytes32 | bytes32 | - var result [2]common.Hash - result[0] = common.BytesToHash(b.z[:]) - result[1] = common.BytesToHash(y[:]) - - return result, nil -} - -// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV4) BlobDataProofForPointEvaluation() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, errors.New("failed to create blob commitment") - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - // Memory layout of ``_blobDataProof``: - // | z | y | kzg_commitment | kzg_proof | - // |---------|---------|----------------|-----------| - // | bytes32 | bytes32 | bytes48 | bytes48 | - - values := []interface{}{*b.z, y, commitment, proof} - blobDataProofArgs, err := GetBlobDataProofArgs() - if err != nil { - return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) - } - return blobDataProofArgs.Pack(values...) -} - -// Blob returns the blob of the batch. -func (b *DABatchV4) Blob() *kzg4844.Blob { - return b.blob -} - -// BlobBytes returns the blob bytes of the batch. -func (b *DABatchV4) BlobBytes() []byte { - return b.blobBytes -} - // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. 
func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) diff --git a/encoding/da.go b/encoding/da.go index b55f79e..61d8c03 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -10,6 +10,7 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) @@ -449,3 +450,28 @@ func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) { } return uint64(len(rlpTxData)), nil } + +// computeBatchDataHash computes the data hash of the batch. +// Note: The batch hash and batch data hash are two different hashes, +// the former is used for identifying a badge in the contracts, +// the latter is used in the public input to the provers. +func computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + var dataBytes []byte + totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore + + for _, chunk := range chunks { + daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + if err != nil { + return common.Hash{}, err + } + totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) + chunkHash, err := daChunk.Hash() + if err != nil { + return common.Hash{}, err + } + dataBytes = append(dataBytes, chunkHash.Bytes()...) + } + + dataHash := crypto.Keccak256Hash(dataBytes) + return dataHash, nil +} diff --git a/encoding/dabatch.go b/encoding/dabatch.go new file mode 100644 index 0000000..8c00bbb --- /dev/null +++ b/encoding/dabatch.go @@ -0,0 +1,255 @@ +package encoding + +import ( + "encoding/binary" + "encoding/hex" + "errors" + "fmt" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// DABatch contains metadata about a batch of DAChunks. +type DABatchV0 struct { + Version uint8 + BatchIndex uint64 + L1MessagePopped uint64 + TotalL1MessagePopped uint64 + DataHash common.Hash + ParentBatchHash common.Hash + SkippedL1MessageBitmap []byte +} + +// Encode serializes the DABatch into bytes. +func (b *DABatchV0) Encode() []byte { + batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) + batchBytes[0] = b.Version + binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) + binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) + copy(batchBytes[25:], b.DataHash[:]) + copy(batchBytes[57:], b.ParentBatchHash[:]) + copy(batchBytes[89:], b.SkippedL1MessageBitmap[:]) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *DABatchV0) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// Blob returns the blob of the batch. +func (b *DABatchV0) Blob() *kzg4844.Blob { + return nil +} + +// BlobBytes returns the blob bytes of the batch. +func (b *DABatchV0) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + +// DABatchV1 contains metadata about a batch of DAChunks. 
+type DABatchV1 struct { + // header + Version uint8 + BatchIndex uint64 + L1MessagePopped uint64 + TotalL1MessagePopped uint64 + DataHash common.Hash + BlobVersionedHash common.Hash + ParentBatchHash common.Hash + SkippedL1MessageBitmap []byte + + // blob payload + blob *kzg4844.Blob + z *kzg4844.Point +} + +// Encode serializes the DABatch into bytes. +func (b *DABatchV1) Encode() []byte { + batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) + batchBytes[0] = b.Version + binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) + binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) + copy(batchBytes[25:], b.DataHash[:]) + copy(batchBytes[57:], b.BlobVersionedHash[:]) + copy(batchBytes[89:], b.ParentBatchHash[:]) + copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *DABatchV1) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// BlobDataProof computes the abi-encoded blob verification data. +func (b *DABatchV1) BlobDataProof() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProof with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProof with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of ``_blobDataProof``: + // | z | y | kzg_commitment | kzg_proof | + // |---------|---------|----------------|-----------| + // | bytes32 | bytes32 | bytes48 | bytes48 | + + values := []interface{}{*b.z, y, commitment, proof} + blobDataProofArgs, err := GetBlobDataProofArgs() + if err != nil { + return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) + } + return blobDataProofArgs.Pack(values...) +} + +// Blob returns the blob of the batch. +func (b *DABatchV1) Blob() *kzg4844.Blob { + return b.blob +} + +// BlobBytes returns the blob bytes of the batch. +func (b *DABatchV1) BlobBytes() []byte { + return nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { + return nil, nil +} + +type DABatchV2 = DABatchV1 + +// DABatchV3 contains metadata about a batch of DAChunks. +type DABatchV3 struct { + // header + Version uint8 `json:"version"` + BatchIndex uint64 `json:"batch_index"` + L1MessagePopped uint64 `json:"l1_message_popped"` + TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` + DataHash common.Hash `json:"data_hash"` + BlobVersionedHash common.Hash `json:"blob_versioned_hash"` + ParentBatchHash common.Hash `json:"parent_batch_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobDataProof [2]common.Hash `json:"blob_data_proof"` + + // blob payload + blob *kzg4844.Blob + z *kzg4844.Point + + // for batch task + blobBytes []byte +} + +// Encode serializes the DABatch into bytes. 
+func (b *DABatchV3) Encode() []byte { + batchBytes := make([]byte, 193) + batchBytes[0] = b.Version + binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) + binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) + copy(batchBytes[25:57], b.DataHash[:]) + copy(batchBytes[57:89], b.BlobVersionedHash[:]) + copy(batchBytes[89:121], b.ParentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) + copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) + copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. +func (b *DABatchV3) Hash() common.Hash { + bytes := b.Encode() + return crypto.Keccak256Hash(bytes) +} + +// blobDataProofForPICircuit computes the abi-encoded blob verification data. +func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { + if b.blob == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") + } + if b.z == nil { + return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty z") + } + + _, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return [2]common.Hash{}, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of result: + // | z | y | + // |---------|---------| + // | bytes32 | bytes32 | + var result [2]common.Hash + result[0] = common.BytesToHash(b.z[:]) + result[1] = common.BytesToHash(y[:]) + + return result, nil +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, errors.New("failed to create blob commitment") + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + // Memory layout of ``_blobDataProof``: + // | z | y | kzg_commitment | kzg_proof | + // |---------|---------|----------------|-----------| + // | bytes32 | bytes32 | bytes48 | bytes48 | + + values := []interface{}{*b.z, y, commitment, proof} + blobDataProofArgs, err := GetBlobDataProofArgs() + if err != nil { + return nil, fmt.Errorf("failed to get blob data proof args, err: %w", err) + } + return blobDataProofArgs.Pack(values...) +} + +// Blob returns the blob of the batch. +func (b *DABatchV3) Blob() *kzg4844.Blob { + return b.blob +} + +// BlobBytes returns the blob bytes of the batch. 
+func (b *DABatchV3) BlobBytes() []byte {
+	return b.blobBytes
+}
+
+type DABatchV4 = DABatchV3

From 79422a22e9a73d9d53aae578c926120b8c0d98d9 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Wed, 21 Aug 2024 22:07:49 +0800
Subject: [PATCH 19/46] move computeBatchDataHash to codecv

---
 encoding/codecv1.go | 25 +++++++++++++++++++++++++
 encoding/codecv2.go | 10 +++++++++-
 encoding/codecv3.go | 10 +++++++++-
 encoding/codecv4.go | 10 +++++++++-
 encoding/da.go      | 26 --------------------------
 5 files changed, 52 insertions(+), 29 deletions(-)

diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index 0bc60a6..5387f37 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -391,3 +391,28 @@ func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64,
 
 // SetCompression enables or disables compression.
 func (o *DACodecV1) SetCompression(enable bool) {}
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	var dataBytes []byte
+	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
+
+	for _, chunk := range chunks {
+		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		if err != nil {
+			return common.Hash{}, err
+		}
+		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
+		chunkHash, err := daChunk.Hash()
+		if err != nil {
+			return common.Hash{}, err
+		}
+		dataBytes = append(dataBytes, chunkHash.Bytes()...)
+	}
+
+	dataHash := crypto.Keccak256Hash(dataBytes)
+	return dataHash, nil
+}
diff --git a/encoding/codecv2.go b/encoding/codecv2.go
index 81b8897..5ad0626 100644
--- a/encoding/codecv2.go
+++ b/encoding/codecv2.go
@@ -44,7 +44,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
@@ -304,3 +304,11 @@ func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 
 // SetCompression enables or disables compression.
 func (o *DACodecV2) SetCompression(enable bool) {}
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	return (&DACodecV1{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+}
diff --git a/encoding/codecv3.go b/encoding/codecv3.go
index e60248b..efebe3f 100644
--- a/encoding/codecv3.go
+++ b/encoding/codecv3.go
@@ -40,7 +40,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
@@ -162,3 +162,11 @@ func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 
 // SetCompression enables or disables compression.
 func (o *DACodecV3) SetCompression(enable bool) {}
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	return (&DACodecV2{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+}
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index f4d1426..906082d 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -51,7 +51,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
 	}
 
 	// batch data hash
-	dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
+	dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore)
 	if err != nil {
 		return nil, err
 	}
@@ -337,3 +337,11 @@ func (o *DACodecV4) SetCompression(enable bool) {
 		atomic.StoreUint32(&o.enableCompress, 0)
 	}
 }
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	return (&DACodecV3{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+}
diff --git a/encoding/da.go b/encoding/da.go
index 61d8c03..b55f79e 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -10,7 +10,6 @@ import (
 	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/common/hexutil"
 	"github.com/scroll-tech/go-ethereum/core/types"
-	"github.com/scroll-tech/go-ethereum/crypto"
 	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
 )
 
@@ -450,28 +449,3 @@ func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) {
 	}
 	return uint64(len(rlpTxData)), nil
 }
-
-// computeBatchDataHash computes the data hash of the batch.
-// Note: The batch hash and batch data hash are two different hashes,
-// the former is used for identifying a badge in the contracts,
-// the latter is used in the public input to the provers.
-func computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
-	var dataBytes []byte
-	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
-
-	for _, chunk := range chunks {
-		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
-		if err != nil {
-			return common.Hash{}, err
-		}
-		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
-		chunkHash, err := daChunk.Hash()
-		if err != nil {
-			return common.Hash{}, err
-		}
-		dataBytes = append(dataBytes, chunkHash.Bytes()...)
- } - - dataHash := crypto.Keccak256Hash(dataBytes) - return dataHash, nil -} From 296880e0bd6d67bcf867f3174e1ef9f8446e5525 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 22:25:15 +0800 Subject: [PATCH 20/46] fix --- encoding/codecv1.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 5387f37..6235172 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -66,7 +66,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // batch data hash - dataHash, err := computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } From c038850ebe6c9710dd53364559eba9c3ff6c093c Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Wed, 21 Aug 2024 23:38:59 +0800 Subject: [PATCH 21/46] add DABatchBase --- encoding/bitmap.go | 4 ++-- encoding/codecv0.go | 34 +++++++++++++++++--------------- encoding/codecv1.go | 42 ++++++++++++++++++++++------------------ encoding/codecv2.go | 42 ++++++++++++++++++++++------------------ encoding/codecv3.go | 47 +++++++++++++++++++++++++-------------------- encoding/codecv4.go | 45 ++++++++++++++++++++++++------------------- encoding/dabatch.go | 36 +++++++++++++++------------------- 7 files changed, 133 insertions(+), 117 deletions(-) diff --git a/encoding/bitmap.go b/encoding/bitmap.go index 7ada6d6..da4386e 100644 --- a/encoding/bitmap.go +++ b/encoding/bitmap.go @@ -7,8 +7,8 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" ) -// ConstructSkippedBitmap constructs skipped L1 message bitmap of the batch. -func ConstructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { +// constructSkippedBitmap constructs skipped L1 message bitmap of the batch. 
-func ConstructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) {
+// constructSkippedBitmap constructs skipped L1 message bitmap of the batch.
+func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { // skipped L1 message bitmap, an array of 256-bit bitmaps var skippedBitmap []*big.Int diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 7b0b5cd..ff3edb8 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -103,19 +103,21 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { dataHash := crypto.Keccak256Hash(dataBytes) // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } daBatch := DABatchV0{ - Version: uint8(CodecV0), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, + DABatchBase: DABatchBase{ + Version: uint8(CodecV0), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, } return &daBatch, nil @@ -128,13 +130,15 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV0{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[57:89]), - SkippedL1MessageBitmap: data[89:], + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[57:89]), + SkippedL1MessageBitmap: data[89:], + }, } return b, nil diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 6235172..5308473 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -72,7 +72,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -84,16 +84,18 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV1{ - Version: uint8(CodecV1), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - blob: blob, - z: z, + DABatchBase: DABatchBase{ + Version: uint8(CodecV1), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: 
batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, + BlobVersionedHash: blobVersionedHash, + blob: blob, + z: z, } return &daBatch, nil @@ -196,14 +198,16 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV1{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[89:121]), + SkippedL1MessageBitmap: data[121:], + }, + BlobVersionedHash: common.BytesToHash(data[57:89]), } return b, nil diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 5ad0626..b496073 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -50,7 +50,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -62,16 +62,18 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV2{ - Version: uint8(CodecV2), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - blob: blob, - z: z, + DABatchBase: DABatchBase{ + Version: uint8(CodecV2), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, + BlobVersionedHash: blobVersionedHash, + blob: blob, + z: z, } return &daBatch, nil @@ -194,14 +196,16 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV2{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[89:121]), + SkippedL1MessageBitmap: data[121:], + }, + BlobVersionedHash: common.BytesToHash(data[57:89]), } return b, nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index efebe3f..70fb3c2 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -46,7 
+46,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -61,17 +61,20 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] daBatch := DABatchV3{ - Version: uint8(CodecV3), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - LastBlockTimestamp: lastBlock.Header.Time, - blob: blob, - z: z, - blobBytes: blobBytes, + DABatchBase: DABatchBase{ + Version: uint8(CodecV3), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, + BlobVersionedHash: blobVersionedHash, + LastBlockTimestamp: lastBlock.Header.Time, + blob: blob, + z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -88,21 +91,23 @@ func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // NewDABatchFromBytes decodes the given byte slice into a DABatch. -// Note: This function only populates the batch header, it leaves the blob-related fields empty. +// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. 
func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } b := &DABatchV3{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[89:121]), + }, + BlobVersionedHash: common.BytesToHash(data[57:89]), + LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), BlobDataProof: [2]common.Hash{ common.BytesToHash(data[129:161]), common.BytesToHash(data[161:193]), diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 906082d..2856ca2 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -57,7 +57,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - _, totalL1MessagePoppedAfter, err := ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -72,17 +72,20 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] daBatch := DABatchV4{ - Version: uint8(CodecV4), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - BlobVersionedHash: blobVersionedHash, - ParentBatchHash: batch.ParentBatchHash, - LastBlockTimestamp: lastBlock.Header.Time, - blob: blob, - z: z, - blobBytes: blobBytes, + DABatchBase: DABatchBase{ + Version: uint8(CodecV4), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, + }, + BlobVersionedHash: blobVersionedHash, + LastBlockTimestamp: lastBlock.Header.Time, + blob: blob, + z: z, + blobBytes: blobBytes, } daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() @@ -215,14 +218,16 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV4{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - BlobVersionedHash: common.BytesToHash(data[57:89]), - ParentBatchHash: common.BytesToHash(data[89:121]), - LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), + DABatchBase: DABatchBase{ + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: 
common.BytesToHash(data[89:121]), + }, + BlobVersionedHash: common.BytesToHash(data[57:89]), + LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), BlobDataProof: [2]common.Hash{ common.BytesToHash(data[129:161]), common.BytesToHash(data[161:193]), diff --git a/encoding/dabatch.go b/encoding/dabatch.go index 8c00bbb..f284c86 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -11,8 +11,8 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -// DABatch contains metadata about a batch of DAChunks. -type DABatchV0 struct { +// DABatchBase contains common metadata for all versions of DABatch +type DABatchBase struct { Version uint8 BatchIndex uint64 L1MessagePopped uint64 @@ -22,6 +22,11 @@ type DABatchV0 struct { SkippedL1MessageBitmap []byte } +// DABatchV0 contains metadata about a batch of DAChunks. +type DABatchV0 struct { + DABatchBase +} + // Encode serializes the DABatch into bytes. func (b *DABatchV0) Encode() []byte { batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) @@ -58,15 +63,9 @@ func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { // DABatchV1 contains metadata about a batch of DAChunks. type DABatchV1 struct { - // header - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - BlobVersionedHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte + DABatchBase + + BlobVersionedHash common.Hash // blob payload blob *kzg4844.Blob @@ -144,16 +143,11 @@ type DABatchV2 = DABatchV1 // DABatchV3 contains metadata about a batch of DAChunks. type DABatchV3 struct { - // header - Version uint8 `json:"version"` - BatchIndex uint64 `json:"batch_index"` - L1MessagePopped uint64 `json:"l1_message_popped"` - TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` - DataHash common.Hash `json:"data_hash"` - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - ParentBatchHash common.Hash `json:"parent_batch_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` + DABatchBase + + BlobVersionedHash common.Hash `json:"blob_versioned_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobDataProof [2]common.Hash `json:"blob_data_proof"` // blob payload blob *kzg4844.Blob From 4499e2c752686c7ad5a9233eecca555d24e9b664 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 22 Aug 2024 00:33:25 +0800 Subject: [PATCH 22/46] add GetCodecVersion --- encoding/codecv0.go | 4 ++-- encoding/da.go | 19 ++++++++++++++++++- go.mod | 2 +- go.sum | 4 ++-- 4 files changed, 23 insertions(+), 6 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index ff3edb8..9c7113a 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -152,7 +152,7 @@ func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) continue } size += 4 // 4 bytes payload length - txPayloadLength, err := GetTxPayloadLength(txData) + txPayloadLength, err := getTxPayloadLength(txData) if err != nil { return 0, err } @@ -172,7 +172,7 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { continue } - txPayloadLength, err := GetTxPayloadLength(txData) + txPayloadLength, err := getTxPayloadLength(txData) if err != nil { return 0, err } diff --git a/encoding/da.go b/encoding/da.go index b55f79e..e4be4d6 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -11,6 +11,7 @@ import ( "github.com/scroll-tech/go-ethereum/common/hexutil" 
"github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/params" ) // BLSModulus is the BLS modulus defined in EIP-4844. @@ -442,10 +443,26 @@ func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { return memoryCost } -func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) { +func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { return 0, err } return uint64(len(rlpTxData)), nil } + +// GetCodecVersion determines the codec version based on hain configuration, block number, and timestamp. +func GetCodecVersion(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) CodecVersion { + switch { + case startBlockNumber.Uint64() == 0 || !chainCfg.IsBernoulli(startBlockNumber): + return CodecV0 // codecv0: genesis batch or batches before Bernoulli + case !chainCfg.IsCurie(startBlockNumber): + return CodecV1 // codecv1: batches after Bernoulli and before Curie + case !chainCfg.IsDarwin(startBlockTimestamp): + return CodecV2 // codecv2: batches after Curie and before Darwin + case !chainCfg.IsDarwinV2(startBlockTimestamp): + return CodecV3 // codecv3: batches after Darwin + default: + return CodecV4 // codecv4: batches after DarwinV2 + } +} diff --git a/go.mod b/go.mod index 8d5696e..cdb9440 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/scroll-tech/da-codec go 1.21 require ( - github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4 + github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index 29a3574..49f5013 100644 --- a/go.sum +++ b/go.sum @@ -72,8 +72,8 @@ github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4 h1:gheWXra3HdZsz6q+w4LrXy8ybHOO6/t6Kb/V64bR5wE= -github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ= +github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e h1:WCJ+UzfrM0jJSirXEYjWCJ89gr5EoRb4KfKb0mo6+Wo= +github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e/go.mod h1:swB5NSp8pKNDuYsTxfR08bHS6L56i119PBx8fxvV8Cs= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= From 8e763ddc2c0e1f17024d88028805cee07bb35efc Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 22 Aug 2024 00:56:03 +0800 Subject: [PATCH 23/46] add BlobVersionedHashes --- encoding/dabatch.go | 15 +++++++++++++++ encoding/encoding.go | 1 + 2 files changed, 16 insertions(+) diff --git a/encoding/dabatch.go b/encoding/dabatch.go index f284c86..b4b9afa 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -51,6 +51,11 @@ func (b *DABatchV0) Blob() *kzg4844.Blob { return nil } +// BlobVersionedHashes returns the blob versioned hashes of the batch. 
+func (b *DABatchV0) BlobVersionedHashes() []common.Hash { + return nil +} + // BlobBytes returns the blob bytes of the batch. func (b *DABatchV0) BlobBytes() []byte { return nil @@ -129,6 +134,11 @@ func (b *DABatchV1) Blob() *kzg4844.Blob { return b.blob } +// BlobVersionedHashes returns the blob versioned hashes of the batch. +func (b *DABatchV1) BlobVersionedHashes() []common.Hash { + return []common.Hash{b.BlobVersionedHash} +} + // BlobBytes returns the blob bytes of the batch. func (b *DABatchV1) BlobBytes() []byte { return nil @@ -241,6 +251,11 @@ func (b *DABatchV3) Blob() *kzg4844.Blob { return b.blob } +// BlobVersionedHashes returns the blob versioned hashes of the batch. +func (b *DABatchV3) BlobVersionedHashes() []common.Hash { + return []common.Hash{b.BlobVersionedHash} +} + // BlobBytes returns the blob bytes of the batch. func (b *DABatchV3) BlobBytes() []byte { return b.blobBytes diff --git a/encoding/encoding.go b/encoding/encoding.go index 7bbdaac..24ff3b9 100644 --- a/encoding/encoding.go +++ b/encoding/encoding.go @@ -20,6 +20,7 @@ type DABatch interface { BlobDataProofForPointEvaluation() ([]byte, error) Blob() *kzg4844.Blob BlobBytes() []byte + BlobVersionedHashes() []common.Hash } // Codec represents the interface for encoding and decoding DA-related structures. From 98d5635acc5968b28fb7eb57a14cc719e67b1c8f Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 22 Aug 2024 16:08:57 +0800 Subject: [PATCH 24/46] rename encoding.go to interfaces.go --- encoding/{encoding.go => interfaces.go} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename encoding/{encoding.go => interfaces.go} (100%) diff --git a/encoding/encoding.go b/encoding/interfaces.go similarity index 100% rename from encoding/encoding.go rename to encoding/interfaces.go From bdb98f868abee7c15e93075f3cdc37f54c878949 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 23 Aug 2024 01:10:52 +0800 Subject: [PATCH 25/46] add NewDABatchWithExpectedBlobVersionedHashes --- encoding/codecv0.go | 6 ++++++ encoding/codecv1.go | 16 ++++++++++++++++ encoding/codecv2.go | 16 ++++++++++++++++ encoding/codecv3.go | 16 ++++++++++++++++ encoding/codecv4.go | 21 +++++++++++++++++++++ encoding/interfaces.go | 1 + 6 files changed, 76 insertions(+) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 9c7113a..8bf3bce 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -123,6 +123,12 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. +func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, _ []common.Hash) (DABatch, error) { + return o.NewDABatch(batch) +} + // NewDABatchFromBytes decodes the given byte slice into a DABatch. func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { if len(data) < 89 { diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 5308473..426472a 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "math/big" + "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -101,6 +102,21 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. 
+func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, errors.New("blob versioned hashes do not match") + } + + return daBatch, nil +} + // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) diff --git a/encoding/codecv2.go b/encoding/codecv2.go index b496073..f3acfe5 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -7,6 +7,7 @@ import ( "errors" "fmt" "math/big" + "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -79,6 +80,21 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. +func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, errors.New("blob versioned hashes do not match") + } + + return daBatch, nil +} + // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 70fb3c2..b786a49 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -4,6 +4,7 @@ import ( "encoding/binary" "errors" "fmt" + "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -85,6 +86,21 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. +func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, errors.New("blob versioned hashes do not match") + } + + return daBatch, nil +} + // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { return (&DACodecV2{}).constructBlobPayload(chunks, useMockTxData) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 2856ca2..b34277f 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -7,6 +7,7 @@ import ( "errors" "fmt" "math/big" + "reflect" "sync/atomic" "github.com/scroll-tech/go-ethereum/common" @@ -96,6 +97,26 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { return &daBatch, nil } +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. 
+func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + o.SetCompression(true) + daBatch, err := o.NewDABatch(batch) + if err != nil || reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + o.SetCompression(false) + daBatch, err = o.NewDABatch(batch) + if err != nil { + return nil, err + } + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, fmt.Errorf("blob versioned hashes do not match, expected %v, got %v", hashes, daBatch.BlobVersionedHashes()) + } + + return daBatch, nil +} + // constructBlobPayload constructs the 4844 blob payload. func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 24ff3b9..f87e281 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -29,6 +29,7 @@ type Codec interface { NewDAChunk(*Chunk, uint64) (DAChunk, error) NewDABatch(*Batch) (DABatch, error) NewDABatchFromBytes([]byte) (DABatch, error) + NewDABatchWithExpectedBlobVersionedHashes(*Batch, []common.Hash) (DABatch, error) EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) From 08d60a313219428d51b3691f30f3caf8f29a36d1 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 23 Aug 2024 01:31:08 +0800 Subject: [PATCH 26/46] tweak --- encoding/codecv0.go | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 8bf3bce..ed173bd 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "math" + "reflect" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" @@ -125,8 +126,17 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, _ []common.Hash) (DABatch, error) { - return o.NewDABatch(batch) +func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, errors.New("blob versioned hashes do not match") + } + + return daBatch, nil } // NewDABatchFromBytes decodes the given byte slice into a DABatch. 
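
A minimal usage sketch of the new validation entry point, for reviewers trying the series out. The package name, `validateAgainstL1`, `recoveredBatch`, and `expectedHashes` are placeholders of mine, not part of the patches; in practice the batch would be rebuilt from L1 calldata and the hashes taken from the blob-carrying commit transaction.

package example

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/go-ethereum/common"
)

// validateAgainstL1 re-derives the DA batch locally and rejects it unless its
// blob versioned hashes match the ones observed on L1. For DACodecV4 the call
// probes the compressed encoding first and retries uncompressed before failing.
func validateAgainstL1(codec encoding.Codec, recoveredBatch *encoding.Batch, expectedHashes []common.Hash) (encoding.DABatch, error) {
	daBatch, err := codec.NewDABatchWithExpectedBlobVersionedHashes(recoveredBatch, expectedHashes)
	if err != nil {
		return nil, fmt.Errorf("batch %d rejected: %w", recoveredBatch.Index, err)
	}
	return daBatch, nil
}
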
From 2d425d8a636a9350201c962a7db85e048589e6ad Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sat, 24 Aug 2024 13:15:42 +0800 Subject: [PATCH 27/46] fix a bug --- encoding/codecv0.go | 2 +- encoding/codecv1.go | 2 +- encoding/codecv2.go | 2 +- encoding/codecv3.go | 2 +- encoding/codecv4.go | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index ed173bd..c937bb0 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -132,7 +132,7 @@ func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash return nil, err } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, errors.New("blob versioned hashes do not match") } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 426472a..faf786c 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -110,7 +110,7 @@ func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash return nil, err } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, errors.New("blob versioned hashes do not match") } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f3acfe5..b4ced56 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -88,7 +88,7 @@ func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash return nil, err } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, errors.New("blob versioned hashes do not match") } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index b786a49..05c1df7 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -94,7 +94,7 @@ func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash return nil, err } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, errors.New("blob versioned hashes do not match") } diff --git a/encoding/codecv4.go b/encoding/codecv4.go index b34277f..0dbe73f 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -102,7 +102,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { o.SetCompression(true) daBatch, err := o.NewDABatch(batch) - if err != nil || reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if err != nil || !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { o.SetCompression(false) daBatch, err = o.NewDABatch(batch) if err != nil { @@ -110,7 +110,7 @@ func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } } - if reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { return nil, fmt.Errorf("blob versioned hashes do not match, expected %v, got %v", hashes, daBatch.BlobVersionedHashes()) } From c1e4a0d0af49b34856720ecebc41ae1e7ec7c0b0 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Sat, 24 Aug 2024 14:48:22 +0800 Subject: [PATCH 28/46] add more logs --- encoding/codecv0.go | 2 +- encoding/codecv1.go | 2 +- encoding/codecv2.go | 2 +- encoding/codecv3.go | 2 +- encoding/codecv4.go | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index c937bb0..a0675ae 
100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -133,7 +133,7 @@ func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, errors.New("blob versioned hashes do not match") + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil diff --git a/encoding/codecv1.go b/encoding/codecv1.go index faf786c..7785914 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -111,7 +111,7 @@ func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, errors.New("blob versioned hashes do not match") + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil diff --git a/encoding/codecv2.go b/encoding/codecv2.go index b4ced56..a04bad3 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -89,7 +89,7 @@ func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, errors.New("blob versioned hashes do not match") + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 05c1df7..3bc0a37 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -95,7 +95,7 @@ func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, errors.New("blob versioned hashes do not match") + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 0dbe73f..9b88c0f 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -111,7 +111,7 @@ func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, fmt.Errorf("blob versioned hashes do not match, expected %v, got %v", hashes, daBatch.BlobVersionedHashes()) + return nil, fmt.Errorf("blob versioned hashes do not match. 
Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) } return daBatch, nil From e5df84634ee0ece08581bd1069ab7dcfce199f66 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 5 Sep 2024 23:02:53 +0800 Subject: [PATCH 29/46] add DecodeDAChunks --- encoding/codecv0.go | 42 ++++++++++++++++++++++++++++++++++++------ encoding/codecv1.go | 10 +++++++--- encoding/codecv2.go | 5 +++++ encoding/codecv3.go | 5 +++++ encoding/codecv4.go | 5 +++++ encoding/da.go | 6 ++++++ encoding/dablock.go | 6 +++--- encoding/interfaces.go | 2 ++ 8 files changed, 69 insertions(+), 12 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index a0675ae..f600bf7 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -174,7 +174,7 @@ func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) } size += txPayloadLength } - size += 60 // 60 bytes BlockContext + size += BlockContextByteSize return size, nil } @@ -197,8 +197,7 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += GetKeccak256Gas(txPayloadLength) // l2 tx hash } - // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 + total += CalldataNonZeroByteGas * BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -243,9 +242,9 @@ func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * 60 // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * BlockContextByteSize // numBlocks of BlockContext in chunk totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil @@ -333,3 +332,34 @@ func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, // SetCompression enables or disables compression. 
func (o *DACodecV0) SetCompression(enable bool) {} + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + var chunks []DAChunk + for _, chunk := range bytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) + } + + blocks := make([]*DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + BlockContextByteSize + blocks[i] = &DABlock{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, &DAChunkV0{ + Blocks: blocks, + }) + } + return chunks, nil +} diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 7785914..dce865e 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -282,8 +282,7 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } } - // 60 bytes BlockContext calldata - total += CalldataNonZeroByteGas * 60 + total += CalldataNonZeroByteGas * BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -303,7 +302,7 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return uint64(60 * len(c.Blocks)), nil + return uint64(BlockContextByteSize * len(c.Blocks)), nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
@@ -436,3 +435,8 @@ func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe dataHash := crypto.Keccak256Hash(dataBytes) return dataHash, nil } + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV1) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + return (&DACodecV0{}).DecodeDAChunks(bytes) +} diff --git a/encoding/codecv2.go b/encoding/codecv2.go index a04bad3..1c9934f 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -332,3 +332,8 @@ func (o *DACodecV2) SetCompression(enable bool) {} func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { return (&DACodecV1{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore) } + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV2) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + return (&DACodecV1{}).DecodeDAChunks(bytes) +} diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 3bc0a37..cbbe5a7 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -191,3 +191,8 @@ func (o *DACodecV3) SetCompression(enable bool) {} func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { return (&DACodecV2{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore) } + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV3) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + return (&DACodecV2{}).DecodeDAChunks(bytes) +} diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 9b88c0f..4fe10da 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -371,3 +371,8 @@ func (o *DACodecV4) SetCompression(enable bool) { func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { return (&DACodecV3{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore) } + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV4) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { + return (&DACodecV3{}).DecodeDAChunks(bytes) +} diff --git a/encoding/da.go b/encoding/da.go index 7abdd1a..567579a 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -18,6 +18,12 @@ var BLSModulus = new(big.Int).SetBytes(common.FromHex("0x73eda753299d7d483339d80 // CalldataNonZeroByteGas is the gas consumption per non zero byte in calldata. const CalldataNonZeroByteGas = 16 +// BlockContextByteSize is the size of the block context in bytes. +const BlockContextByteSize = 60 + +// TxLenByteSize is the size of the transaction length in bytes. +const TxLenByteSize = 4 + // Block represents an L2 block. type Block struct { Header *types.Header diff --git a/encoding/dablock.go b/encoding/dablock.go index 7ae69ab..63dbf07 100644 --- a/encoding/dablock.go +++ b/encoding/dablock.go @@ -18,7 +18,7 @@ type DABlock struct { // Encode serializes the DABlock into a slice of bytes. func (b *DABlock) Encode() []byte { - bytes := make([]byte, 60) + bytes := make([]byte, BlockContextByteSize) binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) if b.BaseFee != nil { @@ -32,8 +32,8 @@ func (b *DABlock) Encode() []byte { // Decode populates the fields of a DABlock from a byte slice. 
func (b *DABlock) Decode(bytes []byte) error { - if len(bytes) != 60 { - return errors.New("block encoding is not 60 bytes long") + if len(bytes) != BlockContextByteSize { + return errors.New("block encoding is not BlockContextByteSize bytes long") } b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8]) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index f87e281..effbd7d 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -31,6 +31,8 @@ type Codec interface { NewDABatchFromBytes([]byte) (DABatch, error) NewDABatchWithExpectedBlobVersionedHashes(*Batch, []common.Hash) (DABatch, error) + DecodeDAChunks(chunks [][]byte) ([]DAChunk, error) + EstimateChunkL1CommitBatchSizeAndBlobSize(*Chunk) (uint64, uint64, error) EstimateBatchL1CommitBatchSizeAndBlobSize(*Batch) (uint64, uint64, error) CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) From ecaca71c61f881e189b2930af280d92e4b809223 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 5 Sep 2024 23:15:03 +0800 Subject: [PATCH 30/46] add BlockRange interface --- encoding/dachunk.go | 8 ++++++++ encoding/interfaces.go | 1 + 2 files changed, 9 insertions(+) diff --git a/encoding/dachunk.go b/encoding/dachunk.go index d4533df..6462462 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -101,6 +101,14 @@ func (c *DAChunkV0) Hash() (common.Hash, error) { return hash, nil } +func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { + if len(c.Blocks) == 0 { + return 0, 0, errors.New("number of blocks is 0") + } + + return c.Blocks[0].BlockNumber, c.Blocks[len(c.Blocks)-1].BlockNumber, nil +} + // DAChunkV1 groups consecutive DABlocks with their transactions. type DAChunkV1 DAChunkV0 diff --git a/encoding/interfaces.go b/encoding/interfaces.go index effbd7d..125b0fc 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -11,6 +11,7 @@ import ( type DAChunk interface { Encode() ([]byte, error) Hash() (common.Hash, error) + BlockRange() (uint64, uint64, error) } // DABatch contains metadata about a batch of DAChunks. From 484fa5959ab5c588460378d623a85db18c196679 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 5 Sep 2024 23:34:17 +0800 Subject: [PATCH 31/46] fix --- encoding/dachunk.go | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/encoding/dachunk.go b/encoding/dachunk.go index 6462462..90f842a 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -101,6 +101,7 @@ func (c *DAChunkV0) Hash() (common.Hash, error) { return hash, nil } +// BlockRange returns the block range of the DAChunk. func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { if len(c.Blocks) == 0 { return 0, 0, errors.New("number of blocks is 0") @@ -159,6 +160,15 @@ func (c *DAChunkV1) Hash() (common.Hash, error) { return hash, nil } +// BlockRange returns the block range of the DAChunk. +func (c *DAChunkV1) BlockRange() (uint64, uint64, error) { + if len(c.Blocks) == 0 { + return 0, 0, errors.New("number of blocks is 0") + } + + return c.Blocks[0].BlockNumber, c.Blocks[len(c.Blocks)-1].BlockNumber, nil +} + // DAChunkV2 groups consecutive DABlocks with their transactions. 
type DAChunkV2 = DAChunkV1 From f1fe4c86c1d706f32e0805940a9693b99397613b Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 5 Sep 2024 23:59:26 +0800 Subject: [PATCH 32/46] add version check --- encoding/codecv0.go | 4 ++++ encoding/codecv1.go | 4 ++++ encoding/codecv2.go | 4 ++++ encoding/codecv3.go | 4 ++++ encoding/codecv4.go | 4 ++++ 5 files changed, 20 insertions(+) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index f600bf7..8aaa6d1 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -145,6 +145,10 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV0 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV0) + } + b := &DABatchV0{ DABatchBase: DABatchBase{ Version: data[0], diff --git a/encoding/codecv1.go b/encoding/codecv1.go index dce865e..fe47438 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -213,6 +213,10 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV1 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV1) + } + b := &DABatchV1{ DABatchBase: DABatchBase{ Version: data[0], diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 1c9934f..f01ccf5 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -211,6 +211,10 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV2 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV2) + } + b := &DABatchV2{ DABatchBase: DABatchBase{ Version: data[0], diff --git a/encoding/codecv3.go b/encoding/codecv3.go index cbbe5a7..bef1272 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -113,6 +113,10 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV3 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3) + } + b := &DABatchV3{ DABatchBase: DABatchBase{ Version: data[0], diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 4fe10da..0852aa4 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -238,6 +238,10 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } + if CodecVersion(data[0]) != CodecV4 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) + } + b := &DABatchV4{ DABatchBase: DABatchBase{ Version: data[0], From 97711e21ec9486e2fe9c54cabf6e879ad6c88808 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:02:27 +0800 Subject: [PATCH 33/46] add Version --- encoding/codecv0.go | 5 +++++ encoding/codecv1.go | 5 +++++ encoding/codecv2.go | 5 +++++ encoding/codecv3.go | 5 +++++ encoding/codecv4.go | 5 +++++ encoding/interfaces.go | 2 ++ 6 files changed, 27 insertions(+) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 8aaa6d1..4f01925 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -14,6 +14,11 @@ 
import ( type DACodecV0 struct{} +// Version returns the codec version. +func (o *DACodecV0) Version() CodecVersion { + return CodecV0 +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { if !block.Header.Number.IsUint64() { diff --git a/encoding/codecv1.go b/encoding/codecv1.go index fe47438..4ccb14b 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -19,6 +19,11 @@ type DACodecV1 struct{} // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv1MaxNumChunks = 15 +// Version returns the codec version. +func (o *DACodecV1) Version() CodecVersion { + return CodecV1 +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f01ccf5..db7d122 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -23,6 +23,11 @@ type DACodecV2 struct{} // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 +// Version returns the codec version. +func (o *DACodecV2) Version() CodecVersion { + return CodecV2 +} + // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index bef1272..489aec5 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -15,6 +15,11 @@ type DACodecV3 struct{} // Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv3MaxNumChunks = 45 +// Version returns the codec version. +func (o *DACodecV3) Version() CodecVersion { + return CodecV3 +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 0852aa4..8f49468 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -26,6 +26,11 @@ type DACodecV4 struct { // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 +// Version returns the codec version. +func (o *DACodecV4) Version() CodecVersion { + return CodecV4 +} + // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 125b0fc..1082566 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -26,6 +26,8 @@ type DABatch interface { // Codec represents the interface for encoding and decoding DA-related structures. 
type Codec interface { + Version() CodecVersion + NewDABlock(*Block, uint64) (*DABlock, error) NewDAChunk(*Chunk, uint64) (DAChunk, error) NewDABatch(*Batch) (DABatch, error) From 2a63797f7c00370e8f51fe3e7ca9a659a0e47390 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:06:47 +0800 Subject: [PATCH 34/46] remove DABatchBase --- encoding/codecv0.go | 32 ++++++++++++++------------------ encoding/codecv1.go | 4 ++-- encoding/codecv2.go | 4 ++-- encoding/codecv3.go | 4 ++-- encoding/codecv4.go | 4 ++-- encoding/dabatch.go | 13 ++++--------- 6 files changed, 26 insertions(+), 35 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 4f01925..36fa687 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -115,15 +115,13 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV0{ - DABatchBase: DABatchBase{ - Version: uint8(CodecV0), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - }, + Version: uint8(CodecV0), + BatchIndex: batch.Index, + L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, + TotalL1MessagePopped: totalL1MessagePoppedAfter, + DataHash: dataHash, + ParentBatchHash: batch.ParentBatchHash, + SkippedL1MessageBitmap: bitmapBytes, } return &daBatch, nil @@ -155,15 +153,13 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV0{ - DABatchBase: DABatchBase{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[57:89]), - SkippedL1MessageBitmap: data[89:], - }, + Version: data[0], + BatchIndex: binary.BigEndian.Uint64(data[1:9]), + L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), + TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), + DataHash: common.BytesToHash(data[25:57]), + ParentBatchHash: common.BytesToHash(data[57:89]), + SkippedL1MessageBitmap: data[89:], } return b, nil diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 4ccb14b..4c3db78 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -90,7 +90,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV1{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: uint8(CodecV1), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, @@ -223,7 +223,7 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV1{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), diff --git a/encoding/codecv2.go b/encoding/codecv2.go index db7d122..f75a3a1 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -68,7 +68,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } daBatch := DABatchV2{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: uint8(CodecV2), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, @@ -221,7 +221,7 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV2{ - DABatchBase: 
DABatchBase{ + DABatchV0: DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 489aec5..788fea1 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -67,7 +67,7 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] daBatch := DABatchV3{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: uint8(CodecV3), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, @@ -123,7 +123,7 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV3{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 8f49468..6f9e029 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -78,7 +78,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] daBatch := DABatchV4{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: uint8(CodecV4), BatchIndex: batch.Index, L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, @@ -248,7 +248,7 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } b := &DABatchV4{ - DABatchBase: DABatchBase{ + DABatchV0: DABatchV0{ Version: data[0], BatchIndex: binary.BigEndian.Uint64(data[1:9]), L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), diff --git a/encoding/dabatch.go b/encoding/dabatch.go index 83a27a5..81f0358 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -11,8 +11,8 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) -// DABatchBase contains common metadata for all versions of DABatch -type DABatchBase struct { +// DABatchV0 contains metadata about a batch of DAChunks. +type DABatchV0 struct { Version uint8 BatchIndex uint64 L1MessagePopped uint64 @@ -22,11 +22,6 @@ type DABatchBase struct { SkippedL1MessageBitmap []byte } -// DABatchV0 contains metadata about a batch of DAChunks. -type DABatchV0 struct { - DABatchBase -} - // Encode serializes the DABatch into bytes. func (b *DABatchV0) Encode() []byte { batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) @@ -68,7 +63,7 @@ func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { // DABatchV1 contains metadata about a batch of DAChunks. type DABatchV1 struct { - DABatchBase + DABatchV0 BlobVersionedHash common.Hash @@ -143,7 +138,7 @@ type DABatchV2 = DABatchV1 // DABatchV3 contains metadata about a batch of DAChunks. 
type DABatchV3 struct { - DABatchBase + DABatchV0 BlobVersionedHash common.Hash `json:"blob_versioned_hash"` LastBlockTimestamp uint64 `json:"last_block_timestamp"` From 87c45377ae34e5c255ed723bee3c1398f71d94e4 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:26:51 +0800 Subject: [PATCH 35/46] add DABlock --- encoding/codecv0.go | 10 +++++----- encoding/codecv1.go | 4 ++-- encoding/codecv2.go | 2 +- encoding/codecv3.go | 2 +- encoding/dablock.go | 27 ++++++++++++++++++++------- encoding/dachunk.go | 6 +++--- encoding/interfaces.go | 7 +++++++ 7 files changed, 39 insertions(+), 19 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 36fa687..2a779b7 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -20,7 +20,7 @@ func (o *DACodecV0) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -38,7 +38,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := &DABlock{ + daBlock := &DABlockV0{ BlockNumber: block.Header.Number.Uint64(), Timestamp: block.Header.Time, BaseFee: block.Header.BaseFee, @@ -52,7 +52,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - var blocks []*DABlock + var blocks []DABlock var txs [][]*types.TransactionData if chunk == nil { @@ -351,11 +351,11 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) } - blocks := make([]*DABlock, numBlocks) + blocks := make([]DABlock, numBlocks) for i := 0; i < numBlocks; i++ { startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlock{} + blocks[i] = &DABlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err diff --git a/encoding/codecv1.go b/encoding/codecv1.go index 4c3db78..f6260d3 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -25,7 +25,7 @@ func (o *DACodecV1) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
-func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) } @@ -39,7 +39,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []*DABlock + var blocks []DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { diff --git a/encoding/codecv2.go b/encoding/codecv2.go index f75a3a1..4b11bff 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -29,7 +29,7 @@ func (o *DACodecV2) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. -func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 788fea1..6efafae 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -21,7 +21,7 @@ func (o *DACodecV3) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/dablock.go b/encoding/dablock.go index 63dbf07..adda6a0 100644 --- a/encoding/dablock.go +++ b/encoding/dablock.go @@ -6,9 +6,9 @@ import ( "math/big" ) -// DABlock represents a Data Availability Block. -type DABlock struct { - BlockNumber uint64 +// DABlockV0 represents a Data Availability Block. +type DABlockV0 struct { + Number uint64 Timestamp uint64 BaseFee *big.Int GasLimit uint64 @@ -17,9 +17,9 @@ type DABlock struct { } // Encode serializes the DABlock into a slice of bytes. -func (b *DABlock) Encode() []byte { +func (b *DABlockV0) Encode() []byte { bytes := make([]byte, BlockContextByteSize) - binary.BigEndian.PutUint64(bytes[0:], b.BlockNumber) + binary.BigEndian.PutUint64(bytes[0:], b.Number) binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) if b.BaseFee != nil { binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64()) @@ -31,12 +31,12 @@ func (b *DABlock) Encode() []byte { } // Decode populates the fields of a DABlock from a byte slice. -func (b *DABlock) Decode(bytes []byte) error { +func (b *DABlockV0) Decode(bytes []byte) error { if len(bytes) != BlockContextByteSize { return errors.New("block encoding is not BlockContextByteSize bytes long") } - b.BlockNumber = binary.BigEndian.Uint64(bytes[0:8]) + b.Number = binary.BigEndian.Uint64(bytes[0:8]) b.Timestamp = binary.BigEndian.Uint64(bytes[8:16]) b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) b.GasLimit = binary.BigEndian.Uint64(bytes[48:56]) @@ -45,3 +45,16 @@ func (b *DABlock) Decode(bytes []byte) error { return nil } + +func (b *DABlockV0) BlockNumber() uint64 { + return b.Number +} + +// DABlockV1 represents a Data Availability Block. +type DABlockV1 = DABlockV0 + +// DABlockV2 represents a Data Availability Block. 
+type DABlockV2 = DABlockV1 + +// DABlockV3 represents a Data Availability Block. +type DABlockV3 = DABlockV2 diff --git a/encoding/dachunk.go b/encoding/dachunk.go index 90f842a..a90c2eb 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -14,7 +14,7 @@ import ( // DAChunk groups consecutive DABlocks with their transactions. type DAChunkV0 struct { - Blocks []*DABlock + Blocks []DABlock Transactions [][]*types.TransactionData } @@ -107,7 +107,7 @@ func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].BlockNumber, c.Blocks[len(c.Blocks)-1].BlockNumber, nil + return c.Blocks[0].BlockNumber(), c.Blocks[len(c.Blocks)-1].BlockNumber(), nil } // DAChunkV1 groups consecutive DABlocks with their transactions. @@ -166,7 +166,7 @@ func (c *DAChunkV1) BlockRange() (uint64, uint64, error) { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].BlockNumber, c.Blocks[len(c.Blocks)-1].BlockNumber, nil + return c.Blocks[0].BlockNumber(), c.Blocks[len(c.Blocks)-1].BlockNumber(), nil } // DAChunkV2 groups consecutive DABlocks with their transactions. diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 1082566..c8cc064 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -7,6 +7,13 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) +// DABlock represents a Data Availability Block. +type DABlock interface { + Encode() []byte + Decode([]byte) error + BlockNumber() uint64 +} + // DAChunk groups consecutive DABlocks with their transactions. type DAChunk interface { Encode() ([]byte, error) From 2ac7825e6100d94accaf115eb91a9850e86ea64c Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:31:21 +0800 Subject: [PATCH 36/46] fixes --- encoding/codecv4.go | 2 +- encoding/interfaces.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 6f9e029..e387986 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -32,7 +32,7 @@ func (o *DACodecV4) Version() CodecVersion { } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
-func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (*DABlock, error) { +func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) } diff --git a/encoding/interfaces.go b/encoding/interfaces.go index c8cc064..6c011b3 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -35,7 +35,7 @@ type DABatch interface { type Codec interface { Version() CodecVersion - NewDABlock(*Block, uint64) (*DABlock, error) + NewDABlock(*Block, uint64) (DABlock, error) NewDAChunk(*Chunk, uint64) (DAChunk, error) NewDABatch(*Batch) (DABatch, error) NewDABatchFromBytes([]byte) (DABatch, error) From 8a6c35fb091ee15c823e58bee5f401debf863d8e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:34:49 +0800 Subject: [PATCH 37/46] fix --- encoding/codecv0.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 2a779b7..c675b95 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -39,7 +39,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } daBlock := &DABlockV0{ - BlockNumber: block.Header.Number.Uint64(), + Number: block.Header.Number.Uint64(), Timestamp: block.Header.Time, BaseFee: block.Header.BaseFee, GasLimit: block.Header.GasLimit, From 83f6b627f52453f5770186ee163e3834fb92173c Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 6 Sep 2024 00:48:22 +0800 Subject: [PATCH 38/46] add CodecFromVersion and CodecFromConfig --- encoding/interfaces.go | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 6c011b3..fa78f4f 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -2,9 +2,11 @@ package encoding import ( "fmt" + "math/big" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/params" ) // DABlock represents a Data Availability Block. @@ -66,8 +68,8 @@ const ( CodecV4 ) -// GetCodec returns the appropriate codec for the given version. -func GetCodec(version CodecVersion) (Codec, error) { +// CodecFromVersion returns the appropriate codec for the given version. +func CodecFromVersion(version CodecVersion) (Codec, error) { switch version { case CodecV0: return &DACodecV0{}, nil @@ -83,3 +85,18 @@ func GetCodec(version CodecVersion) (Codec, error) { return nil, fmt.Errorf("unsupported codec version: %d", version) } } + +// CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. 
+func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec {
+	if chainCfg.IsDarwinV2(startBlockTimestamp) {
+		return &DACodecV4{}
+	} else if chainCfg.IsDarwin(startBlockTimestamp) {
+		return &DACodecV3{}
+	} else if chainCfg.IsCurie(startBlockNumber) {
+		return &DACodecV2{}
+	} else if chainCfg.IsBernoulli(startBlockNumber) {
+		return &DACodecV1{}
+	} else {
+		return &DACodecV0{}
+	}
+}

From c4a249537b1caf00216e37cbef1aaf86ea463b19 Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Fri, 6 Sep 2024 00:49:27 +0800
Subject: [PATCH 39/46] remove GetCodecVersion

---
 encoding/da.go | 17 -----------------
 1 file changed, 17 deletions(-)

diff --git a/encoding/da.go b/encoding/da.go
index 567579a..b09a677 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -9,7 +9,6 @@ import (
 	"github.com/scroll-tech/go-ethereum/common/hexutil"
 	"github.com/scroll-tech/go-ethereum/core/types"
 	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
-	"github.com/scroll-tech/go-ethereum/params"
 )
 
 // BLSModulus is the BLS modulus defined in EIP-4844.
@@ -415,22 +414,6 @@ func getTxPayloadLength(txData *types.TransactionData) (uint64, error) {
 	return uint64(len(rlpTxData)), nil
 }
 
-// GetCodecVersion determines the codec version based on chain configuration, block number, and timestamp.
-func GetCodecVersion(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) CodecVersion {
-	switch {
-	case startBlockNumber.Uint64() == 0 || !chainCfg.IsBernoulli(startBlockNumber):
-		return CodecV0 // codecv0: genesis batch or batches before Bernoulli
-	case !chainCfg.IsCurie(startBlockNumber):
-		return CodecV1 // codecv1: batches after Bernoulli and before Curie
-	case !chainCfg.IsDarwin(startBlockTimestamp):
-		return CodecV2 // codecv2: batches after Curie and before Darwin
-	case !chainCfg.IsDarwinV2(startBlockTimestamp):
-		return CodecV3 // codecv3: batches after Darwin and before DarwinV2
-	default:
-		return CodecV4 // codecv4: batches after DarwinV2
-	}
-}
-
 // BlobDataProofFromValues creates the blob data proof from the given values.
 // Memory layout of ``_blobDataProof``:
 // | z | y | kzg_commitment | kzg_proof |

From 10af8e74d5360b8082e7da0c389a57f391ec0ebc Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Wed, 18 Sep 2024 21:09:36 +0800
Subject: [PATCH 40/46] fix typos

---
 encoding/da.go | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/encoding/da.go b/encoding/da.go
index b09a677..b634967 100644
--- a/encoding/da.go
+++ b/encoding/da.go
@@ -204,11 +204,11 @@ func (c *Chunk) NumL2Transactions() uint64 {
 
 // L2GasUsed calculates the total gas of L2 transactions in a Chunk.
 func (c *Chunk) L2GasUsed() uint64 {
-	var totalTxNum uint64
+	var totalGasUsed uint64
 	for _, block := range c.Blocks {
-		totalTxNum += block.Header.GasUsed
+		totalGasUsed += block.Header.GasUsed
 	}
-	return totalTxNum
+	return totalGasUsed
 }
 
 // StateRoot gets the state root after committing/finalizing the batch.
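
To see how the two constructors from patch 38 replace the removed GetCodecVersion, a short sketch follows; `codecForEncodedBatch` and `codecForNewBatch` are hypothetical helper names of mine and the inputs are placeholders.

package example

import (
	"fmt"
	"math/big"

	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/go-ethereum/params"
)

// codecForEncodedBatch resolves a codec from the version byte that every
// encoded DABatch carries at offset 0, the same byte the per-codec
// NewDABatchFromBytes implementations validate since patch 32.
func codecForEncodedBatch(data []byte) (encoding.Codec, error) {
	if len(data) == 0 {
		return nil, fmt.Errorf("empty batch encoding")
	}
	return encoding.CodecFromVersion(encoding.CodecVersion(data[0]))
}

// codecForNewBatch resolves a codec from the fork schedule instead:
// CodecFromConfig checks the newest fork first (DarwinV2) and falls
// through to DACodecV0 for pre-Bernoulli blocks.
func codecForNewBatch(chainCfg *params.ChainConfig, blockNumber *big.Int, blockTime uint64) encoding.Codec {
	return encoding.CodecFromConfig(chainCfg, blockNumber, blockTime)
}
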
From 1594e0fc92e7b345363ec6a7d677a50887c9d002 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 23 Sep 2024 00:25:19 +0800 Subject: [PATCH 41/46] make Block fields internal --- encoding/codecv0.go | 18 ++++----- encoding/dablock.go | 91 ++++++++++++++++++++++++++++-------------- encoding/dachunk.go | 4 +- encoding/interfaces.go | 2 +- 4 files changed, 72 insertions(+), 43 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index c675b95..736510d 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -38,14 +38,14 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := &DABlockV0{ - Number: block.Header.Number.Uint64(), - Timestamp: block.Header.Time, - BaseFee: block.Header.BaseFee, - GasLimit: block.Header.GasLimit, - NumTransactions: uint16(numTransactions), - NumL1Messages: uint16(numL1Messages), - } + daBlock := NewDABlockImpl( + block.Header.Number.Uint64(), + block.Header.Time, + block.Header.BaseFee, + block.Header.GasLimit, + uint16(numTransactions), + uint16(numL1Messages), + ) return daBlock, nil } @@ -355,7 +355,7 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { for i := 0; i < numBlocks; i++ { startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlockV0{} + blocks[i] = &DABlockImpl{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err diff --git a/encoding/dablock.go b/encoding/dablock.go index adda6a0..bea446c 100644 --- a/encoding/dablock.go +++ b/encoding/dablock.go @@ -6,55 +6,84 @@ import ( "math/big" ) -// DABlockV0 represents a Data Availability Block. -type DABlockV0 struct { - Number uint64 - Timestamp uint64 - BaseFee *big.Int - GasLimit uint64 - NumTransactions uint16 - NumL1Messages uint16 +// DABlockImpl represents a Data Availability Block. +type DABlockImpl struct { + number uint64 + timestamp uint64 + baseFee *big.Int + gasLimit uint64 + numTransactions uint16 + numL1Messages uint16 +} + +// NewDABlockImpl is a constructor function for DABlockImpl that initializes the internal fields. +func NewDABlockImpl(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *DABlockImpl { + return &DABlockImpl{ + number: number, + timestamp: timestamp, + baseFee: baseFee, + gasLimit: gasLimit, + numTransactions: numTransactions, + numL1Messages: numL1Messages, + } } // Encode serializes the DABlock into a slice of bytes. -func (b *DABlockV0) Encode() []byte { +func (b *DABlockImpl) Encode() []byte { bytes := make([]byte, BlockContextByteSize) - binary.BigEndian.PutUint64(bytes[0:], b.Number) - binary.BigEndian.PutUint64(bytes[8:], b.Timestamp) - if b.BaseFee != nil { - binary.BigEndian.PutUint64(bytes[40:], b.BaseFee.Uint64()) + binary.BigEndian.PutUint64(bytes[0:], b.number) + binary.BigEndian.PutUint64(bytes[8:], b.timestamp) + if b.baseFee != nil { + binary.BigEndian.PutUint64(bytes[40:], b.baseFee.Uint64()) } - binary.BigEndian.PutUint64(bytes[48:], b.GasLimit) - binary.BigEndian.PutUint16(bytes[56:], b.NumTransactions) - binary.BigEndian.PutUint16(bytes[58:], b.NumL1Messages) + binary.BigEndian.PutUint64(bytes[48:], b.gasLimit) + binary.BigEndian.PutUint16(bytes[56:], b.numTransactions) + binary.BigEndian.PutUint16(bytes[58:], b.numL1Messages) return bytes } // Decode populates the fields of a DABlock from a byte slice. 
-func (b *DABlockV0) Decode(bytes []byte) error { +func (b *DABlockImpl) Decode(bytes []byte) error { if len(bytes) != BlockContextByteSize { return errors.New("block encoding is not BlockContextByteSize bytes long") } - b.Number = binary.BigEndian.Uint64(bytes[0:8]) - b.Timestamp = binary.BigEndian.Uint64(bytes[8:16]) - b.BaseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) - b.GasLimit = binary.BigEndian.Uint64(bytes[48:56]) - b.NumTransactions = binary.BigEndian.Uint16(bytes[56:58]) - b.NumL1Messages = binary.BigEndian.Uint16(bytes[58:60]) + b.number = binary.BigEndian.Uint64(bytes[0:8]) + b.timestamp = binary.BigEndian.Uint64(bytes[8:16]) + b.baseFee = new(big.Int).SetUint64(binary.BigEndian.Uint64(bytes[40:48])) + b.gasLimit = binary.BigEndian.Uint64(bytes[48:56]) + b.numTransactions = binary.BigEndian.Uint16(bytes[56:58]) + b.numL1Messages = binary.BigEndian.Uint16(bytes[58:60]) return nil } -func (b *DABlockV0) BlockNumber() uint64 { - return b.Number +// Number returns the block number. +func (b *DABlockImpl) Number() uint64 { + return b.number +} + +// Timestamp returns the block timestamp. +func (b *DABlockImpl) Timestamp() uint64 { + return b.timestamp } -// DABlockV1 represents a Data Availability Block. -type DABlockV1 = DABlockV0 +// BaseFee returns the block base fee. +func (b *DABlockImpl) BaseFee() *big.Int { + return b.baseFee +} -// DABlockV2 represents a Data Availability Block. -type DABlockV2 = DABlockV1 +// GasLimit returns the block gas limit. +func (b *DABlockImpl) GasLimit() uint64 { + return b.gasLimit +} -// DABlockV3 represents a Data Availability Block. -type DABlockV3 = DABlockV2 +// NumTransactions returns the number of transactions in the block. +func (b *DABlockImpl) NumTransactions() uint16 { + return b.numTransactions +} + +// NumL1Messages returns the number of L1 messages in the block. +func (b *DABlockImpl) NumL1Messages() uint16 { + return b.numL1Messages +} diff --git a/encoding/dachunk.go b/encoding/dachunk.go index a90c2eb..7858345 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -107,7 +107,7 @@ func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].BlockNumber(), c.Blocks[len(c.Blocks)-1].BlockNumber(), nil + return c.Blocks[0].Number(), c.Blocks[len(c.Blocks)-1].Number(), nil } // DAChunkV1 groups consecutive DABlocks with their transactions. @@ -166,7 +166,7 @@ func (c *DAChunkV1) BlockRange() (uint64, uint64, error) { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].BlockNumber(), c.Blocks[len(c.Blocks)-1].BlockNumber(), nil + return c.Blocks[0].Number(), c.Blocks[len(c.Blocks)-1].Number(), nil } // DAChunkV2 groups consecutive DABlocks with their transactions. diff --git a/encoding/interfaces.go b/encoding/interfaces.go index fa78f4f..43c5967 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -13,7 +13,7 @@ import ( type DABlock interface { Encode() []byte Decode([]byte) error - BlockNumber() uint64 + Number() uint64 } // DAChunk groups consecutive DABlocks with their transactions. 
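
As a sanity check on the refactor above, a round-trip sketch over the accessor-based DABlockImpl; the function name and all field values below are arbitrary placeholders of mine.

package example

import (
	"fmt"
	"math/big"

	"github.com/scroll-tech/da-codec/encoding"
)

func blockContextRoundTrip() error {
	block := encoding.NewDABlockImpl(
		1234,                      // block number
		1724976000,                // timestamp
		big.NewInt(2_000_000_000), // base fee
		10_000_000,                // gas limit
		5,                         // number of transactions
		2,                         // of which L1 messages
	)

	// Encode emits exactly BlockContextByteSize (60) bytes; offsets 16
	// through 39 stay zero because only the low 8 bytes of the base-fee
	// slot are written, at offset 40.
	encoded := block.Encode()

	var decoded encoding.DABlockImpl
	if err := decoded.Decode(encoded); err != nil {
		return err
	}
	fmt.Println(decoded.Number(), decoded.Timestamp(), decoded.NumTransactions(), decoded.NumL1Messages())
	return nil
}
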
From 1f9facd82a2f741013734bae299aab58847b88df Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 23 Sep 2024 00:51:38 +0800 Subject: [PATCH 42/46] make chunk fields internal --- encoding/codecv0.go | 11 +++------ encoding/codecv1.go | 7 ++---- encoding/dachunk.go | 59 +++++++++++++++++++++++++-------------------- 3 files changed, 38 insertions(+), 39 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 736510d..552a95a 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -77,12 +77,9 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := DAChunkV0{ - Blocks: blocks, - Transactions: txs, - } + daChunk := NewDAChunkV0(blocks, txs) - return &daChunk, nil + return daChunk, nil } // NewDABatch creates a DABatch from the provided Batch. @@ -362,9 +359,7 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { } } - chunks = append(chunks, &DAChunkV0{ - Blocks: blocks, - }) + chunks = append(chunks, NewDAChunkV0(blocks, nil)) } return chunks, nil } diff --git a/encoding/codecv1.go b/encoding/codecv1.go index f6260d3..beac048 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1.go @@ -52,12 +52,9 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := DAChunkV1{ - Blocks: blocks, - Transactions: txs, - } + daChunk := NewDAChunkV1(blocks, txs) - return &daChunk, nil + return daChunk, nil } // NewDABatch creates a DABatch from the provided Batch. diff --git a/encoding/dachunk.go b/encoding/dachunk.go index 7858345..61a03e4 100644 --- a/encoding/dachunk.go +++ b/encoding/dachunk.go @@ -12,32 +12,40 @@ import ( "github.com/scroll-tech/go-ethereum/crypto" ) -// DAChunk groups consecutive DABlocks with their transactions. +// DAChunkV0 groups consecutive DABlocks with their transactions. type DAChunkV0 struct { - Blocks []DABlock - Transactions [][]*types.TransactionData + blocks []DABlock + transactions [][]*types.TransactionData +} + +// NewDAChunkV0 is a constructor for DAChunkV0, initializing with blocks and transactions. +func NewDAChunkV0(blocks []DABlock, transactions [][]*types.TransactionData) *DAChunkV0 { + return &DAChunkV0{ + blocks: blocks, + transactions: transactions, + } } // Encode serializes the DAChunk into a slice of bytes. func (c *DAChunkV0) Encode() ([]byte, error) { - if len(c.Blocks) == 0 { + if len(c.blocks) == 0 { return nil, errors.New("number of blocks is 0") } - if len(c.Blocks) > 255 { + if len(c.blocks) > 255 { return nil, errors.New("number of blocks exceeds 1 byte") } var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) + chunkBytes = append(chunkBytes, byte(len(c.blocks))) var l2TxDataBytes []byte - for _, block := range c.Blocks { + for _, block := range c.blocks { chunkBytes = append(chunkBytes, block.Encode()...) } - for _, blockTxs := range c.Transactions { + for _, blockTxs := range c.transactions { for _, txData := range blockTxs { if txData.Type == types.L1MessageTxType { continue @@ -78,7 +86,7 @@ func (c *DAChunkV0) Hash() (common.Hash, error) { } // concatenate l1 and l2 tx hashes - for _, blockTxs := range c.Transactions { + for _, blockTxs := range c.transactions { var l1TxHashes []byte var l2TxHashes []byte for _, txData := range blockTxs { @@ -103,22 +111,30 @@ func (c *DAChunkV0) Hash() (common.Hash, error) { // BlockRange returns the block range of the DAChunk. 
func (c *DAChunkV0) BlockRange() (uint64, uint64, error) { - if len(c.Blocks) == 0 { + if len(c.blocks) == 0 { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].Number(), c.Blocks[len(c.Blocks)-1].Number(), nil + return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil } // DAChunkV1 groups consecutive DABlocks with their transactions. type DAChunkV1 DAChunkV0 +// NewDAChunkV1 is a constructor for DAChunkV1, initializing with blocks and transactions. +func NewDAChunkV1(blocks []DABlock, transactions [][]*types.TransactionData) *DAChunkV1 { + return &DAChunkV1{ + blocks: blocks, + transactions: transactions, + } +} + // Encode serializes the DAChunk into a slice of bytes. func (c *DAChunkV1) Encode() ([]byte, error) { var chunkBytes []byte - chunkBytes = append(chunkBytes, byte(len(c.Blocks))) + chunkBytes = append(chunkBytes, byte(len(c.blocks))) - for _, block := range c.Blocks { + for _, block := range c.blocks { blockBytes := block.Encode() chunkBytes = append(chunkBytes, blockBytes...) } @@ -131,14 +147,14 @@ func (c *DAChunkV1) Hash() (common.Hash, error) { var dataBytes []byte // concatenate block contexts - for _, block := range c.Blocks { + for _, block := range c.blocks { encodedBlock := block.Encode() // only the first 58 bytes are used in the hashing process dataBytes = append(dataBytes, encodedBlock[:58]...) } // concatenate l1 tx hashes - for _, blockTxs := range c.Transactions { + for _, blockTxs := range c.transactions { for _, txData := range blockTxs { if txData.Type != types.L1MessageTxType { continue @@ -162,18 +178,9 @@ func (c *DAChunkV1) Hash() (common.Hash, error) { // BlockRange returns the block range of the DAChunk. func (c *DAChunkV1) BlockRange() (uint64, uint64, error) { - if len(c.Blocks) == 0 { + if len(c.blocks) == 0 { return 0, 0, errors.New("number of blocks is 0") } - return c.Blocks[0].Number(), c.Blocks[len(c.Blocks)-1].Number(), nil + return c.blocks[0].Number(), c.blocks[len(c.blocks)-1].Number(), nil } - -// DAChunkV2 groups consecutive DABlocks with their transactions. -type DAChunkV2 = DAChunkV1 - -// DAChunkV3 groups consecutive DABlocks with their transactions. -type DAChunkV3 = DAChunkV2 - -// DAChunkV4 groups consecutive DABlocks with their transactions. 
-type DAChunkV4 = DAChunkV3 From 451eb68f78c2d38db8b4d1b7fa55ebd40d47d949 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 23 Sep 2024 01:55:19 +0800 Subject: [PATCH 43/46] make batch fields internal and add some tweaks --- encoding/codecv0.go | 64 +++++++------- encoding/codecv1.go | 57 ++++++------ encoding/codecv2.go | 52 ++++++----- encoding/codecv3.go | 65 +++++++------- encoding/codecv4.go | 65 +++++++------- encoding/dabatch.go | 205 +++++++++++++++++++++++++++++++------------- encoding/dablock.go | 26 +++--- 7 files changed, 304 insertions(+), 230 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 552a95a..6f30bed 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -38,13 +38,13 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := NewDABlockImpl( - block.Header.Number.Uint64(), - block.Header.Time, - block.Header.BaseFee, - block.Header.GasLimit, - uint16(numTransactions), - uint16(numL1Messages), + daBlock := NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages ) return daBlock, nil @@ -77,7 +77,10 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV0(blocks, txs) + daChunk := NewDAChunkV0( + blocks, // blocks + txs, // transactions + ) return daChunk, nil } @@ -111,17 +114,17 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := DABatchV0{ - Version: uint8(CodecV0), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - } + daBatch := NewDABatchV0( + uint8(CodecV0), // version + batch.Index, // batchIndex + totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + bitmapBytes, // skippedL1MessageBitmap + ) - return &daBatch, nil + return daBatch, nil } // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. 
@@ -149,15 +152,15 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) {
 		return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV0)
 	}
 
-	b := &DABatchV0{
-		Version:                data[0],
-		BatchIndex:             binary.BigEndian.Uint64(data[1:9]),
-		L1MessagePopped:        binary.BigEndian.Uint64(data[9:17]),
-		TotalL1MessagePopped:   binary.BigEndian.Uint64(data[17:25]),
-		DataHash:               common.BytesToHash(data[25:57]),
-		ParentBatchHash:        common.BytesToHash(data[57:89]),
-		SkippedL1MessageBitmap: data[89:],
-	}
+	b := NewDABatchV0(
+		data[0],                              // version
+		binary.BigEndian.Uint64(data[1:9]),   // batchIndex
+		binary.BigEndian.Uint64(data[9:17]),  // l1MessagePopped
+		binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped
+		common.BytesToHash(data[25:57]),      // dataHash
+		common.BytesToHash(data[57:89]),      // parentBatchHash
+		data[89:],                            // skippedL1MessageBitmap
+	)
 
 	return b, nil
 }
@@ -352,14 +355,17 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 		for i := 0; i < numBlocks; i++ {
 			startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte
 			endIdx := startIdx + BlockContextByteSize
-			blocks[i] = &DABlockImpl{}
+			blocks[i] = &DABlockV0{}
 			err := blocks[i].Decode(chunk[startIdx:endIdx])
 			if err != nil {
 				return nil, err
 			}
 		}
 
-		chunks = append(chunks, NewDAChunkV0(blocks, nil))
+		chunks = append(chunks, NewDAChunkV0(
+			blocks, // blocks
+			nil,    // transactions
+		))
 	}
 	return chunks, nil
 }
diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index beac048..4c8ccd2 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -52,7 +52,10 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64)
 		txs = append(txs, block.Transactions)
 	}
 
-	daChunk := NewDAChunkV1(blocks, txs)
+	daChunk := NewDAChunkV1(
+		blocks, // blocks
+		txs,    // transactions
+	)
 
 	return daChunk, nil
 }
@@ -86,22 +89,20 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) {
 		return nil, err
 	}
 
-	daBatch := DABatchV1{
-		DABatchV0: DABatchV0{
-			Version:                uint8(CodecV1),
-			BatchIndex:             batch.Index,
-			L1MessagePopped:        totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore,
-			TotalL1MessagePopped:   totalL1MessagePoppedAfter,
-			DataHash:               dataHash,
-			ParentBatchHash:        batch.ParentBatchHash,
-			SkippedL1MessageBitmap: bitmapBytes,
-		},
-		BlobVersionedHash: blobVersionedHash,
-		blob:              blob,
-		z:                 z,
-	}
+	daBatch := NewDABatchV1(
+		uint8(CodecV1), // version
+		batch.Index,    // batchIndex
+		totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped
+		totalL1MessagePoppedAfter, // totalL1MessagePopped
+		dataHash,                  // dataHash
+		batch.ParentBatchHash,     // parentBatchHash
+		blobVersionedHash,         // blobVersionedHash
+		bitmapBytes,               // skippedL1MessageBitmap
+		blob,                      // blob
+		z,                         // z
+	)
 
-	return &daBatch, nil
+	return daBatch, nil
 }
 
 // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch.
@@ -219,18 +220,18 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV1) } - b := &DABatchV1{ - DABatchV0: DABatchV0{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], - }, - BlobVersionedHash: common.BytesToHash(data[57:89]), - } + b := NewDABatchV1( + data[0], // version + binary.BigEndian.Uint64(data[1:9]), // batchIndex + binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped + binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped + common.BytesToHash(data[25:57]), // dataHash + common.BytesToHash(data[89:121]), // parentBatchHash + common.BytesToHash(data[57:89]), // blobVersionedHash + data[121:], // skippedL1MessageBitmap + nil, // blob + nil, // z + ) return b, nil } diff --git a/encoding/codecv2.go b/encoding/codecv2.go index 4b11bff..3d4052b 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2.go @@ -67,22 +67,20 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := DABatchV2{ - DABatchV0: DABatchV0{ - Version: uint8(CodecV2), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - }, - BlobVersionedHash: blobVersionedHash, - blob: blob, - z: z, - } + daBatch := NewDABatchV1( + uint8(CodecV2), // version + batch.Index, // batchIndex + totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z + ) - return &daBatch, nil + return daBatch, nil } // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. 
@@ -220,18 +218,18 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV2) } - b := &DABatchV2{ - DABatchV0: DABatchV0{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[89:121]), - SkippedL1MessageBitmap: data[121:], - }, - BlobVersionedHash: common.BytesToHash(data[57:89]), - } + b := NewDABatchV1( + data[0], // version + binary.BigEndian.Uint64(data[1:9]), // batchIndex + binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped + binary.BigEndian.Uint64(data[17:25]), // totalL1MessagePopped + common.BytesToHash(data[25:57]), // dataHash + common.BytesToHash(data[89:121]), // parentBatchHash + common.BytesToHash(data[57:89]), // blobVersionedHash + data[121:], // skippedL1MessageBitmap + nil, // blob + nil, // z + ) return b, nil } diff --git a/encoding/codecv3.go b/encoding/codecv3.go index 6efafae..50437a8 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -66,29 +66,20 @@ func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - daBatch := DABatchV3{ - DABatchV0: DABatchV0{ - Version: uint8(CodecV3), - BatchIndex: batch.Index, - L1MessagePopped: totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore, - TotalL1MessagePopped: totalL1MessagePoppedAfter, - DataHash: dataHash, - ParentBatchHash: batch.ParentBatchHash, - SkippedL1MessageBitmap: bitmapBytes, - }, - BlobVersionedHash: blobVersionedHash, - LastBlockTimestamp: lastBlock.Header.Time, - blob: blob, - z: z, - blobBytes: blobBytes, - } - - daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit() - if err != nil { - return nil, err - } - - return &daBatch, nil + return NewDABatchV2( + uint8(CodecV3), // version + batch.Index, // batchIndex + totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + lastBlock.Header.Time, // lastBlockTimestamp + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z + blobBytes, // blobBytes + ) } // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. 
@@ -122,22 +113,24 @@ func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) {
 		return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3)
 	}
 
-	b := &DABatchV3{
-		DABatchV0: DABatchV0{
-			Version:              data[0],
-			BatchIndex:           binary.BigEndian.Uint64(data[1:9]),
-			L1MessagePopped:      binary.BigEndian.Uint64(data[9:17]),
-			TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]),
-			DataHash:             common.BytesToHash(data[25:57]),
-			ParentBatchHash:      common.BytesToHash(data[89:121]),
-		},
-		BlobVersionedHash:  common.BytesToHash(data[57:89]),
-		LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]),
-		BlobDataProof: [2]common.Hash{
+	b := NewDABatchV2WithProof(
+		data[0],                                // Version
+		binary.BigEndian.Uint64(data[1:9]),     // BatchIndex
+		binary.BigEndian.Uint64(data[9:17]),    // L1MessagePopped
+		binary.BigEndian.Uint64(data[17:25]),   // TotalL1MessagePopped
+		binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp
+		common.BytesToHash(data[25:57]),        // DataHash
+		common.BytesToHash(data[89:121]),       // ParentBatchHash
+		common.BytesToHash(data[57:89]),        // BlobVersionedHash
+		nil,                                    // skippedL1MessageBitmap
+		nil,                                    // blob
+		nil,                                    // z
+		nil,                                    // blobBytes
+		[2]common.Hash{ // BlobDataProof
 			common.BytesToHash(data[129:161]),
 			common.BytesToHash(data[161:193]),
 		},
-	}
+	)
 
 	return b, nil
 }
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index e387986..eb4af9c 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -77,29 +77,20 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) {
 	lastChunk := batch.Chunks[len(batch.Chunks)-1]
 	lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1]
 
-	daBatch := DABatchV4{
-		DABatchV0: DABatchV0{
-			Version:                uint8(CodecV4),
-			BatchIndex:             batch.Index,
-			L1MessagePopped:        totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore,
-			TotalL1MessagePopped:   totalL1MessagePoppedAfter,
-			DataHash:               dataHash,
-			ParentBatchHash:        batch.ParentBatchHash,
-			SkippedL1MessageBitmap: bitmapBytes,
-		},
-		BlobVersionedHash:  blobVersionedHash,
-		LastBlockTimestamp: lastBlock.Header.Time,
-		blob:               blob,
-		z:                  z,
-		blobBytes:          blobBytes,
-	}
-
-	daBatch.BlobDataProof, err = daBatch.blobDataProofForPICircuit()
-	if err != nil {
-		return nil, err
-	}
-
-	return &daBatch, nil
+	return NewDABatchV2(
+		uint8(CodecV4),            // version
+		batch.Index,               // batchIndex
+		totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped
+		totalL1MessagePoppedAfter, // totalL1MessagePopped
+		lastBlock.Header.Time,     // lastBlockTimestamp
+		dataHash,                  // dataHash
+		batch.ParentBatchHash,     // parentBatchHash
+		blobVersionedHash,         // blobVersionedHash
+		bitmapBytes,               // skippedL1MessageBitmap
+		blob,                      // blob
+		z,                         // z
+		blobBytes,                 // blobBytes
+	)
 }
 
 // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch.
@@ -247,22 +238,24 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) } - b := &DABatchV4{ - DABatchV0: DABatchV0{ - Version: data[0], - BatchIndex: binary.BigEndian.Uint64(data[1:9]), - L1MessagePopped: binary.BigEndian.Uint64(data[9:17]), - TotalL1MessagePopped: binary.BigEndian.Uint64(data[17:25]), - DataHash: common.BytesToHash(data[25:57]), - ParentBatchHash: common.BytesToHash(data[89:121]), - }, - BlobVersionedHash: common.BytesToHash(data[57:89]), - LastBlockTimestamp: binary.BigEndian.Uint64(data[121:129]), - BlobDataProof: [2]common.Hash{ + b := NewDABatchV2WithProof( + data[0], // Version + binary.BigEndian.Uint64(data[1:9]), // BatchIndex + binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped + binary.BigEndian.Uint64(data[17:25]), // TotalL1MessagePopped + binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp + common.BytesToHash(data[25:57]), // DataHash + common.BytesToHash(data[89:121]), // ParentBatchHash + common.BytesToHash(data[57:89]), // BlobVersionedHash + nil, // skippedL1MessageBitmap + nil, // blob + nil, // z + nil, // blobBytes + [2]common.Hash{ // BlobDataProof common.BytesToHash(data[129:161]), common.BytesToHash(data[161:193]), }, - } + ) return b, nil } diff --git a/encoding/dabatch.go b/encoding/dabatch.go index 81f0358..481ce5c 100644 --- a/encoding/dabatch.go +++ b/encoding/dabatch.go @@ -13,25 +13,38 @@ import ( // DABatchV0 contains metadata about a batch of DAChunks. type DABatchV0 struct { - Version uint8 - BatchIndex uint64 - L1MessagePopped uint64 - TotalL1MessagePopped uint64 - DataHash common.Hash - ParentBatchHash common.Hash - SkippedL1MessageBitmap []byte + version uint8 + batchIndex uint64 + l1MessagePopped uint64 + totalL1MessagePopped uint64 + dataHash common.Hash + parentBatchHash common.Hash + skippedL1MessageBitmap []byte +} + +// NewDABatchV0 is a constructor for DABatchV0. +func NewDABatchV0(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash common.Hash, skippedL1MessageBitmap []byte) *DABatchV0 { + return &DABatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + } } // Encode serializes the DABatch into bytes. 
func (b *DABatchV0) Encode() []byte { - batchBytes := make([]byte, 89+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.ParentBatchHash[:]) - copy(batchBytes[89:], b.SkippedL1MessageBitmap[:]) + batchBytes := make([]byte, 89+len(b.skippedL1MessageBitmap)) + batchBytes[0] = b.version + binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) + copy(batchBytes[25:], b.dataHash[:]) + copy(batchBytes[57:], b.parentBatchHash[:]) + copy(batchBytes[89:], b.skippedL1MessageBitmap[:]) return batchBytes } @@ -65,24 +78,40 @@ func (b *DABatchV0) BlobDataProofForPointEvaluation() ([]byte, error) { type DABatchV1 struct { DABatchV0 - BlobVersionedHash common.Hash + blobVersionedHash common.Hash + blob *kzg4844.Blob + z *kzg4844.Point +} - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point +// NewDABatchV1 is a constructor for DABatchV1. +func NewDABatchV1(version uint8, batchIndex, l1MessagePopped, totalL1MessagePopped uint64, dataHash, parentBatchHash, blobVersionedHash common.Hash, skippedL1MessageBitmap []byte, blob *kzg4844.Blob, z *kzg4844.Point) *DABatchV1 { + return &DABatchV1{ + DABatchV0: DABatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + } } // Encode serializes the DABatch into bytes. func (b *DABatchV1) Encode() []byte { - batchBytes := make([]byte, 121+len(b.SkippedL1MessageBitmap)) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:], b.TotalL1MessagePopped) - copy(batchBytes[25:], b.DataHash[:]) - copy(batchBytes[57:], b.BlobVersionedHash[:]) - copy(batchBytes[89:], b.ParentBatchHash[:]) - copy(batchBytes[121:], b.SkippedL1MessageBitmap[:]) + batchBytes := make([]byte, 121+len(b.skippedL1MessageBitmap)) + batchBytes[0] = b.version + binary.BigEndian.PutUint64(batchBytes[1:], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[9:], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:], b.totalL1MessagePopped) + copy(batchBytes[25:], b.dataHash[:]) + copy(batchBytes[57:], b.blobVersionedHash[:]) + copy(batchBytes[89:], b.parentBatchHash[:]) + copy(batchBytes[121:], b.skippedL1MessageBitmap[:]) return batchBytes } @@ -121,7 +150,7 @@ func (b *DABatchV1) Blob() *kzg4844.Blob { // BlobVersionedHashes returns the blob versioned hashes of the batch. func (b *DABatchV1) BlobVersionedHashes() []common.Hash { - return []common.Hash{b.BlobVersionedHash} + return []common.Hash{b.blobVersionedHash} } // BlobBytes returns the blob bytes of the batch. @@ -134,48 +163,104 @@ func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) { return nil, nil } -type DABatchV2 = DABatchV1 - -// DABatchV3 contains metadata about a batch of DAChunks. -type DABatchV3 struct { +// DABatchV2 contains metadata about a batch of DAChunks. 
+type DABatchV2 struct { DABatchV0 - BlobVersionedHash common.Hash `json:"blob_versioned_hash"` - LastBlockTimestamp uint64 `json:"last_block_timestamp"` - BlobDataProof [2]common.Hash `json:"blob_data_proof"` + // FIXME: export correct JSON format for prover. + blobVersionedHash common.Hash + lastBlockTimestamp uint64 + blobDataProof [2]common.Hash + blob *kzg4844.Blob + z *kzg4844.Point + blobBytes []byte +} + +// NewDABatchV2 is a constructor for DABatchV2 that calls blobDataProofForPICircuit internally. +func NewDABatchV2(version uint8, + batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, + dataHash, parentBatchHash, blobVersionedHash common.Hash, + skippedL1MessageBitmap []byte, + blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, +) (*DABatchV2, error) { + daBatch := &DABatchV2{ + DABatchV0: DABatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + lastBlockTimestamp: lastBlockTimestamp, + blob: blob, + z: z, + blobBytes: blobBytes, + } + + proof, err := daBatch.blobDataProofForPICircuit() + if err != nil { + return nil, err + } - // blob payload - blob *kzg4844.Blob - z *kzg4844.Point + daBatch.blobDataProof = proof - // for batch task - blobBytes []byte + return daBatch, nil +} + +// NewDABatchV2WithProof is a constructor for DABatchV2 that allows directly passing blobDataProof. +func NewDABatchV2WithProof(version uint8, + batchIndex, l1MessagePopped, totalL1MessagePopped, lastBlockTimestamp uint64, + dataHash, parentBatchHash, blobVersionedHash common.Hash, + skippedL1MessageBitmap []byte, + blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte, + blobDataProof [2]common.Hash, // Accept blobDataProof directly +) *DABatchV2 { + return &DABatchV2{ + DABatchV0: DABatchV0{ + version: version, + batchIndex: batchIndex, + l1MessagePopped: l1MessagePopped, + totalL1MessagePopped: totalL1MessagePopped, + dataHash: dataHash, + parentBatchHash: parentBatchHash, + skippedL1MessageBitmap: skippedL1MessageBitmap, + }, + blobVersionedHash: blobVersionedHash, + lastBlockTimestamp: lastBlockTimestamp, + blob: blob, + z: z, + blobBytes: blobBytes, + blobDataProof: blobDataProof, // Set blobDataProof directly + } } // Encode serializes the DABatch into bytes. 
-func (b *DABatchV3) Encode() []byte { +func (b *DABatchV2) Encode() []byte { batchBytes := make([]byte, 193) - batchBytes[0] = b.Version - binary.BigEndian.PutUint64(batchBytes[1:9], b.BatchIndex) - binary.BigEndian.PutUint64(batchBytes[9:17], b.L1MessagePopped) - binary.BigEndian.PutUint64(batchBytes[17:25], b.TotalL1MessagePopped) - copy(batchBytes[25:57], b.DataHash[:]) - copy(batchBytes[57:89], b.BlobVersionedHash[:]) - copy(batchBytes[89:121], b.ParentBatchHash[:]) - binary.BigEndian.PutUint64(batchBytes[121:129], b.LastBlockTimestamp) - copy(batchBytes[129:161], b.BlobDataProof[0].Bytes()) - copy(batchBytes[161:193], b.BlobDataProof[1].Bytes()) + batchBytes[0] = b.version + binary.BigEndian.PutUint64(batchBytes[1:9], b.batchIndex) + binary.BigEndian.PutUint64(batchBytes[9:17], b.l1MessagePopped) + binary.BigEndian.PutUint64(batchBytes[17:25], b.totalL1MessagePopped) + copy(batchBytes[25:57], b.dataHash[:]) + copy(batchBytes[57:89], b.blobVersionedHash[:]) + copy(batchBytes[89:121], b.parentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[121:129], b.lastBlockTimestamp) + copy(batchBytes[129:161], b.blobDataProof[0].Bytes()) + copy(batchBytes[161:193], b.blobDataProof[1].Bytes()) return batchBytes } // Hash computes the hash of the serialized DABatch. -func (b *DABatchV3) Hash() common.Hash { +func (b *DABatchV2) Hash() common.Hash { bytes := b.Encode() return crypto.Keccak256Hash(bytes) } // blobDataProofForPICircuit computes the abi-encoded blob verification data. -func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { +func (b *DABatchV2) blobDataProofForPICircuit() ([2]common.Hash, error) { if b.blob == nil { return [2]common.Hash{}, errors.New("called blobDataProofForPICircuit with empty blob") } @@ -200,7 +285,7 @@ func (b *DABatchV3) blobDataProofForPICircuit() ([2]common.Hash, error) { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { +func (b *DABatchV2) BlobDataProofForPointEvaluation() ([]byte, error) { if b.blob == nil { return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") } @@ -222,18 +307,16 @@ func (b *DABatchV3) BlobDataProofForPointEvaluation() ([]byte, error) { } // Blob returns the blob of the batch. -func (b *DABatchV3) Blob() *kzg4844.Blob { +func (b *DABatchV2) Blob() *kzg4844.Blob { return b.blob } // BlobVersionedHashes returns the blob versioned hashes of the batch. -func (b *DABatchV3) BlobVersionedHashes() []common.Hash { - return []common.Hash{b.BlobVersionedHash} +func (b *DABatchV2) BlobVersionedHashes() []common.Hash { + return []common.Hash{b.blobVersionedHash} } // BlobBytes returns the blob bytes of the batch. -func (b *DABatchV3) BlobBytes() []byte { +func (b *DABatchV2) BlobBytes() []byte { return b.blobBytes } - -type DABatchV4 = DABatchV3 diff --git a/encoding/dablock.go b/encoding/dablock.go index bea446c..baa7d44 100644 --- a/encoding/dablock.go +++ b/encoding/dablock.go @@ -6,8 +6,8 @@ import ( "math/big" ) -// DABlockImpl represents a Data Availability Block. -type DABlockImpl struct { +// DABlockV0 represents a Data Availability Block. +type DABlockV0 struct { number uint64 timestamp uint64 baseFee *big.Int @@ -16,9 +16,9 @@ type DABlockImpl struct { numL1Messages uint16 } -// NewDABlockImpl is a constructor function for DABlockImpl that initializes the internal fields. 
-func NewDABlockImpl(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *DABlockImpl {
-	return &DABlockImpl{
+// NewDABlockV0 is a constructor function for DABlockV0 that initializes the internal fields.
+func NewDABlockV0(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *DABlockV0 {
+	return &DABlockV0{
 		number:          number,
 		timestamp:       timestamp,
 		baseFee:         baseFee,
@@ -29,7 +29,7 @@ func NewDABlockImpl(number uint64, timestamp uint64, baseFee *big.Int, gasLimit
 }
 
 // Encode serializes the DABlock into a slice of bytes.
-func (b *DABlockImpl) Encode() []byte {
+func (b *DABlockV0) Encode() []byte {
 	bytes := make([]byte, BlockContextByteSize)
 	binary.BigEndian.PutUint64(bytes[0:], b.number)
 	binary.BigEndian.PutUint64(bytes[8:], b.timestamp)
@@ -43,7 +43,7 @@ func (b *DABlockImpl) Encode() []byte {
 }
 
 // Decode populates the fields of a DABlock from a byte slice.
-func (b *DABlockImpl) Decode(bytes []byte) error {
+func (b *DABlockV0) Decode(bytes []byte) error {
 	if len(bytes) != BlockContextByteSize {
 		return errors.New("block encoding is not BlockContextByteSize bytes long")
 	}
@@ -59,31 +59,31 @@ func (b *DABlockImpl) Decode(bytes []byte) error {
 }
 
 // Number returns the block number.
-func (b *DABlockImpl) Number() uint64 {
+func (b *DABlockV0) Number() uint64 {
 	return b.number
 }
 
 // Timestamp returns the block timestamp.
-func (b *DABlockImpl) Timestamp() uint64 {
+func (b *DABlockV0) Timestamp() uint64 {
 	return b.timestamp
 }
 
 // BaseFee returns the block base fee.
-func (b *DABlockImpl) BaseFee() *big.Int {
+func (b *DABlockV0) BaseFee() *big.Int {
 	return b.baseFee
 }
 
 // GasLimit returns the block gas limit.
-func (b *DABlockImpl) GasLimit() uint64 {
+func (b *DABlockV0) GasLimit() uint64 {
 	return b.gasLimit
 }
 
 // NumTransactions returns the number of transactions in the block.
-func (b *DABlockImpl) NumTransactions() uint16 {
+func (b *DABlockV0) NumTransactions() uint16 {
 	return b.numTransactions
 }
 
 // NumL1Messages returns the number of L1 messages in the block.
-func (b *DABlockImpl) NumL1Messages() uint16 {
+func (b *DABlockV0) NumL1Messages() uint16 {
 	return b.numL1Messages
 }

From 955f375e23003ba5b448006e1ec18233c370130f Mon Sep 17 00:00:00 2001
From: colinlyguo
Date: Mon, 23 Sep 2024 02:16:24 +0800
Subject: [PATCH 44/46] add JSONFromBytes

---
 encoding/codecv0.go    |  6 ++++++
 encoding/codecv1.go    |  6 ++++++
 encoding/codecv2.go    |  6 ++++++
 encoding/codecv3.go    | 16 ++++++++++++++++
 encoding/codecv4.go    |  5 +++++
 encoding/dabatch.go    | 37 ++++++++++++++++++++++++++++++++++++-
 encoding/interfaces.go |  3 ++-
 7 files changed, 77 insertions(+), 2 deletions(-)

diff --git a/encoding/codecv0.go b/encoding/codecv0.go
index 6f30bed..87a5a01 100644
--- a/encoding/codecv0.go
+++ b/encoding/codecv0.go
@@ -369,3 +369,9 @@ func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	}
 	return chunks, nil
 }
+
+// JSONFromBytes for CodecV0 returns empty values.
+func (o *DACodecV0) JSONFromBytes(data []byte) ([]byte, error) {
+	// DACodecV0 doesn't need this, so just return empty values
+	return nil, nil
+}
diff --git a/encoding/codecv1.go b/encoding/codecv1.go
index 4c8ccd2..a8ab0ea 100644
--- a/encoding/codecv1.go
+++ b/encoding/codecv1.go
@@ -447,3 +447,9 @@ func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe
 func (o *DACodecV1) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	return (&DACodecV0{}).DecodeDAChunks(bytes)
 }
+
+// JSONFromBytes for CodecV1 returns empty values.
+func (o *DACodecV1) JSONFromBytes(data []byte) ([]byte, error) {
+	// DACodecV1 doesn't need this, so just return empty values
+	return nil, nil
+}
diff --git a/encoding/codecv2.go b/encoding/codecv2.go
index 3d4052b..4519fe0 100644
--- a/encoding/codecv2.go
+++ b/encoding/codecv2.go
@@ -344,3 +344,9 @@ func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe
 func (o *DACodecV2) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	return (&DACodecV1{}).DecodeDAChunks(bytes)
 }
+
+// JSONFromBytes for CodecV2 returns empty values.
+func (o *DACodecV2) JSONFromBytes(data []byte) ([]byte, error) {
+	// DACodecV2 doesn't need this, so just return empty values
+	return nil, nil
+}
diff --git a/encoding/codecv3.go b/encoding/codecv3.go
index 50437a8..ab0e288 100644
--- a/encoding/codecv3.go
+++ b/encoding/codecv3.go
@@ -2,6 +2,7 @@ package encoding
 
 import (
 	"encoding/binary"
+	"encoding/json"
 	"errors"
 	"fmt"
 	"reflect"
@@ -198,3 +199,18 @@ func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe
 func (o *DACodecV3) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	return (&DACodecV2{}).DecodeDAChunks(bytes)
 }
+
+// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON.
+func (o *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) {
+	batch, err := o.NewDABatchFromBytes(data)
+	if err != nil {
+		return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err)
+	}
+
+	jsonBytes, err := json.Marshal(batch)
+	if err != nil {
+		return nil, fmt.Errorf("failed to marshal DABatchV2 to JSON: %w", err)
+	}
+
+	return jsonBytes, nil
+}
diff --git a/encoding/codecv4.go b/encoding/codecv4.go
index eb4af9c..9f77171 100644
--- a/encoding/codecv4.go
+++ b/encoding/codecv4.go
@@ -378,3 +378,8 @@ func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe
 func (o *DACodecV4) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) {
 	return (&DACodecV3{}).DecodeDAChunks(bytes)
 }
+
+// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON.
+func (o *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) {
+	return (&DACodecV3{}).JSONFromBytes(data)
+}
diff --git a/encoding/dabatch.go b/encoding/dabatch.go
index 481ce5c..d23690e 100644
--- a/encoding/dabatch.go
+++ b/encoding/dabatch.go
@@ -3,6 +3,7 @@ package encoding
 import (
 	"encoding/binary"
 	"encoding/hex"
+	"encoding/json"
 	"errors"
 	"fmt"
 
@@ -167,7 +168,6 @@ func (b *DABatchV1) BlobDataProofForPointEvaluation() ([]byte, error) {
 type DABatchV2 struct {
 	DABatchV0
 
-	// FIXME: export correct JSON format for prover.
 	blobVersionedHash  common.Hash
 	lastBlockTimestamp uint64
 	blobDataProof      [2]common.Hash
@@ -320,3 +320,38 @@ func (b *DABatchV2) BlobVersionedHashes() []common.Hash {
 func (b *DABatchV2) BlobBytes() []byte {
 	return b.blobBytes
 }
+
+// MarshalJSON implements the custom JSON serialization for DABatchV2.
+// This method is designed to provide the prover with batch info in snake_case format.
+func (b *DABatchV2) MarshalJSON() ([]byte, error) { + type daBatchV2JSON struct { + Version uint8 `json:"version"` + BatchIndex uint64 `json:"batch_index"` + L1MessagePopped uint64 `json:"l1_message_popped"` + TotalL1MessagePopped uint64 `json:"total_l1_message_popped"` + DataHash string `json:"data_hash"` + ParentBatchHash string `json:"parent_batch_hash"` + SkippedL1MessageBitmap string `json:"skipped_l1_message_bitmap"` + BlobVersionedHash string `json:"blob_versioned_hash"` + LastBlockTimestamp uint64 `json:"last_block_timestamp"` + BlobBytes string `json:"blob_bytes"` + BlobDataProof [2]string `json:"blob_data_proof"` + } + + return json.Marshal(&daBatchV2JSON{ + Version: b.version, + BatchIndex: b.batchIndex, + L1MessagePopped: b.l1MessagePopped, + TotalL1MessagePopped: b.totalL1MessagePopped, + DataHash: b.dataHash.Hex(), + ParentBatchHash: b.parentBatchHash.Hex(), + SkippedL1MessageBitmap: common.Bytes2Hex(b.skippedL1MessageBitmap), + BlobVersionedHash: b.blobVersionedHash.Hex(), + LastBlockTimestamp: b.lastBlockTimestamp, + BlobBytes: common.Bytes2Hex(b.blobBytes), + BlobDataProof: [2]string{ + b.blobDataProof[0].Hex(), + b.blobDataProof[1].Hex(), + }, + }) +} diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 43c5967..fd133e9 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -54,7 +54,8 @@ type Codec interface { EstimateBatchL1CommitGas(*Batch) (uint64, error) EstimateBatchL1CommitCalldataSize(*Batch) (uint64, error) - SetCompression(enable bool) // only used for codecv4 + SetCompression(enable bool) + JSONFromBytes([]byte) ([]byte, error) } // CodecVersion represents the version of the codec. From f73c63e89ffb75828b4b37002bc788173f959a3e Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Mon, 30 Sep 2024 17:06:14 +0800 Subject: [PATCH 45/46] fix a typo --- encoding/codecv3.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/encoding/codecv3.go b/encoding/codecv3.go index ab0e288..c348905 100644 --- a/encoding/codecv3.go +++ b/encoding/codecv3.go @@ -209,7 +209,7 @@ func (o *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { jsonBytes, err := json.Marshal(batch) if err != nil { - return nil, fmt.Errorf("failed to marshal DABatchV2 to JSON: %w", err) + return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) } return jsonBytes, nil From cf9f08431fe10dc1963338f808fc934f4f86c6a5 Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Tue, 1 Oct 2024 16:17:41 +0800 Subject: [PATCH 46/46] use register mode --- encoding/bitmap.go | 4 +- encoding/{ => codecv0}/codecv0.go | 136 +++---- encoding/{ => codecv1}/codecv1.go | 184 ++++++---- encoding/{ => codecv2}/codecv2.go | 293 ++++++++++++--- encoding/codecv3.go | 216 ----------- encoding/codecv3/codecv3.go | 574 ++++++++++++++++++++++++++++++ encoding/{ => codecv4}/codecv4.go | 312 +++++++++++++--- encoding/da.go | 3 +- encoding/interfaces.go | 47 +-- 9 files changed, 1310 insertions(+), 459 deletions(-) rename encoding/{ => codecv0}/codecv0.go (65%) rename encoding/{ => codecv1}/codecv1.go (64%) rename encoding/{ => codecv2}/codecv2.go (51%) delete mode 100644 encoding/codecv3.go create mode 100644 encoding/codecv3/codecv3.go rename encoding/{ => codecv4}/codecv4.go (51%) diff --git a/encoding/bitmap.go b/encoding/bitmap.go index da4386e..7ada6d6 100644 --- a/encoding/bitmap.go +++ b/encoding/bitmap.go @@ -7,8 +7,8 @@ import ( "github.com/scroll-tech/go-ethereum/core/types" ) -// constructSkippedBitmap constructs skipped L1 message bitmap of the batch. 
-func constructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { +// ConstructSkippedBitmap constructs skipped L1 message bitmap of the batch. +func ConstructSkippedBitmap(batchIndex uint64, chunks []*Chunk, totalL1MessagePoppedBefore uint64) ([]byte, uint64, error) { // skipped L1 message bitmap, an array of 256-bit bitmaps var skippedBitmap []*big.Int diff --git a/encoding/codecv0.go b/encoding/codecv0/codecv0.go similarity index 65% rename from encoding/codecv0.go rename to encoding/codecv0/codecv0.go index 87a5a01..c8fcd63 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0/codecv0.go @@ -1,4 +1,4 @@ -package encoding +package codecv0 import ( "encoding/binary" @@ -7,6 +7,7 @@ import ( "math" "reflect" + "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -14,13 +15,20 @@ import ( type DACodecV0 struct{} +// init registers the DACodecV0 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV0, func() encoding.Codec { + return &DACodecV0{} + }) +} + // Version returns the codec version. -func (o *DACodecV0) Version() CodecVersion { - return CodecV0 +func (o *DACodecV0) Version() encoding.CodecVersion { + return encoding.CodecV0 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { +func (o *DACodecV0) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -38,7 +46,7 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := NewDABlockV0( + daBlock := encoding.NewDABlockV0( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -51,8 +59,8 @@ func (o *DACodecV0) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - var blocks []DABlock +func (o *DACodecV0) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { + var blocks []encoding.DABlock var txs [][]*types.TransactionData if chunk == nil { @@ -77,7 +85,7 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV0( + daChunk := encoding.NewDAChunkV0( blocks, // blocks txs, // transactions ) @@ -86,7 +94,7 @@ func (o *DACodecV0) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } // NewDABatch creates a DABatch from the provided Batch. 
-func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { +func (o *DACodecV0) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { // compute batch data hash var dataBytes []byte totalL1MessagePoppedBeforeChunk := batch.TotalL1MessagePoppedBefore @@ -109,14 +117,14 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { dataHash := crypto.Keccak256Hash(dataBytes) // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } - daBatch := NewDABatchV0( - uint8(CodecV0), // version - batch.Index, // batchIndex + daBatch := encoding.NewDABatchV0( + uint8(encoding.CodecV0), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped dataHash, // dataHash @@ -129,7 +137,7 @@ func (o *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { +func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { daBatch, err := o.NewDABatch(batch) if err != nil { return nil, err @@ -143,16 +151,16 @@ func (o *DACodecV0) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // NewDABatchFromBytes decodes the given byte slice into a DABatch. -func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (o *DACodecV0) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { if len(data) < 89 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 89 bytes but got %d", len(data)) } - if CodecVersion(data[0]) != CodecV0 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV0) + if encoding.CodecVersion(data[0]) != encoding.CodecV0 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV0) } - b := NewDABatchV0( + b := encoding.NewDABatchV0( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped @@ -166,25 +174,25 @@ func (o *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. -func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitCalldataSize(b *encoding.Block) (uint64, error) { var size uint64 for _, txData := range b.Transactions { if txData.Type == types.L1MessageTxType { continue } size += 4 // 4 bytes payload length - txPayloadLength, err := getTxPayloadLength(txData) + txPayloadLength, err := encoding.GetTxPayloadLength(txData) if err != nil { return 0, err } size += txPayloadLength } - size += BlockContextByteSize + size += encoding.BlockContextByteSize return size, nil } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. 
-func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { +func (o *DACodecV0) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -193,16 +201,16 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { continue } - txPayloadLength, err := getTxPayloadLength(txData) + txPayloadLength, err := encoding.GetTxPayloadLength(txData) if err != nil { return 0, err } - total += CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero - total += CalldataNonZeroByteGas * 4 // 4 bytes payload length - total += GetKeccak256Gas(txPayloadLength) // l2 tx hash + total += encoding.CalldataNonZeroByteGas * txPayloadLength // an over-estimate: treat each byte as non-zero + total += encoding.CalldataNonZeroByteGas * 4 // 4 bytes payload length + total += encoding.GetKeccak256Gas(txPayloadLength) // l2 tx hash } - total += CalldataNonZeroByteGas * BlockContextByteSize + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -211,17 +219,17 @@ func (o *DACodecV0) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, block := range c.Blocks { blockL1CommitCalldataSize, err := o.EstimateBlockL1CommitCalldataSize(block) @@ -234,7 +242,7 @@ func (o *DACodecV0) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
-func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { var totalTxNum uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -247,35 +255,35 @@ func (o *DACodecV0) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += CalldataNonZeroByteGas * numBlocks * BlockContextByteSize // numBlocks of BlockContext in chunk + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += encoding.CalldataNonZeroByteGas * numBlocks * encoding.BlockContextByteSize // numBlocks of BlockContext in chunk - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalTxNum) // chunk hash return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -289,21 +297,21 @@ func (o *DACodecV0) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 
32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) totalL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) if err != nil { return 0, err } - totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize) + totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) } return totalL1CommitGas, nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { var totalL1CommitCalldataSize uint64 for _, chunk := range b.Chunks { chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) @@ -316,22 +324,22 @@ func (o *DACodecV0) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV0) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { +func (o *DACodecV0) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { return true, nil } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV0) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { +func (o *DACodecV0) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { return true, nil } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { +func (o *DACodecV0) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { return 0, 0, nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
-func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { +func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { return 0, 0, nil } @@ -339,30 +347,30 @@ func (o *DACodecV0) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, func (o *DACodecV0) SetCompression(enable bool) {} // DecodeDAChunks takes a byte slice and decodes it into a []DAChunk -func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { - var chunks []DAChunk +func (o *DACodecV0) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) { + var chunks []encoding.DAChunk for _, chunk := range bytes { if len(chunk) < 1 { return nil, fmt.Errorf("invalid chunk, length is less than 1") } numBlocks := int(chunk[0]) - if len(chunk) < 1+numBlocks*BlockContextByteSize { - return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*BlockContextByteSize) + if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) } - blocks := make([]DABlock, numBlocks) + blocks := make([]encoding.DABlock, numBlocks) for i := 0; i < numBlocks; i++ { - startIdx := 1 + i*BlockContextByteSize // add 1 to skip numBlocks byte - endIdx := startIdx + BlockContextByteSize - blocks[i] = &DABlockV0{} + startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + encoding.BlockContextByteSize + blocks[i] = &encoding.DABlockV0{} err := blocks[i].Decode(chunk[startIdx:endIdx]) if err != nil { return nil, err } } - chunks = append(chunks, NewDAChunkV0( + chunks = append(chunks, encoding.NewDAChunkV0( blocks, // blocks nil, // transactions )) diff --git a/encoding/codecv1.go b/encoding/codecv1/codecv1.go similarity index 64% rename from encoding/codecv1.go rename to encoding/codecv1/codecv1.go index a8ab0ea..30a4373 100644 --- a/encoding/codecv1.go +++ b/encoding/codecv1/codecv1.go @@ -1,13 +1,15 @@ -package encoding +package codecv1 import ( "crypto/sha256" "encoding/binary" "errors" "fmt" + "math" "math/big" "reflect" + "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" @@ -16,21 +18,54 @@ import ( type DACodecV1 struct{} +// init registers the DACodecV1 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV1, func() encoding.Codec { + return &DACodecV1{} + }) +} + // Codecv1MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv1MaxNumChunks = 15 // Version returns the codec version. -func (o *DACodecV1) Version() CodecVersion { - return CodecV1 +func (o *DACodecV1) Version() encoding.CodecVersion { + return encoding.CodecV1 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
-func (o *DACodecV1) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - return (&DACodecV0{}).NewDABlock(block, totalL1MessagePoppedBefore) +func (o *DACodecV1) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := numL1Messages + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := encoding.NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages + ) + + return daBlock, nil } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { +func (o *DACodecV1) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { if len(chunk.Blocks) == 0 { return nil, errors.New("number of blocks is 0") } @@ -39,7 +74,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks exceeds 1 byte") } - var blocks []DABlock + var blocks []encoding.DABlock var txs [][]*types.TransactionData for _, block := range chunk.Blocks { @@ -52,7 +87,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := NewDAChunkV1( + daChunk := encoding.NewDAChunkV1( blocks, // blocks txs, // transactions ) @@ -61,7 +96,7 @@ func (o *DACodecV1) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } // NewDABatch creates a DABatch from the provided Batch. 
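A usage sketch for the constructor below; the batch value is assumed to be populated from L2 traces, and the Encode accessor on the returned DABatch is an assumption based on how header bytes are produced elsewhere in this package:

	// commitArtifacts sketches driving the V1 pipeline end to end.
	func commitArtifacts(batch *encoding.Batch) ([]byte, []common.Hash, error) {
		codec := &DACodecV1{}
		daBatch, err := codec.NewDABatch(batch)
		if err != nil {
			return nil, nil, err
		}
		// the header bytes committed on L1, plus the versioned hashes of the
		// EIP-4844 blob sidecar that carries the L2 transactions
		return daBatch.Encode(), daBatch.BlobVersionedHashes(), nil
	}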
-func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { +func (o *DACodecV1) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv1MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -78,7 +113,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -89,9 +124,9 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := NewDABatchV1( - uint8(CodecV2), // version - batch.Index, // batchIndex + daBatch := encoding.NewDABatchV1( + uint8(encoding.CodecV1), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped dataHash, // dataHash @@ -107,7 +142,7 @@ func (o *DACodecV1) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { +func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { daBatch, err := o.NewDABatch(batch) if err != nil { return nil, err @@ -121,7 +156,7 @@ func (o *DACodecV1) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. 
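The function below reserves a fixed metadata prefix: 2 bytes of num_chunks plus Codecv1MaxNumChunks 4-byte chunk sizes, i.e. 2 + 15×4 = 62 bytes for this codec. An isolated sketch of just that layout, mirroring the PutUint16/PutUint32 calls in the body:

	import "encoding/binary"

	// writeBlobMetadata lays out the metadata prefix: num_chunks (uint16 BE)
	// followed by one uint32 BE size per chunk slot; unused slots stay zero.
	func writeBlobMetadata(chunkSizes []uint32, maxNumChunks int) []byte {
		meta := make([]byte, 2+4*maxNumChunks)
		binary.BigEndian.PutUint16(meta[0:], uint16(len(chunkSizes)))
		for i, size := range chunkSizes {
			binary.BigEndian.PutUint32(meta[2+4*i:], size)
		}
		return meta
	}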
-func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { +func (o *DACodecV1) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv1MaxNumChunks*4 @@ -150,7 +185,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, err } @@ -181,7 +216,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* copy(challengePreimage[0:], hash[:]) // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, err } @@ -198,7 +233,7 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -211,16 +246,16 @@ func (o *DACodecV1) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (o *DACodecV1) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - if CodecVersion(data[0]) != CodecV1 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV1) + if encoding.CodecVersion(data[0]) != encoding.CodecV1 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV1) } - b := NewDABatchV1( + b := encoding.NewDABatchV1( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped @@ -237,17 +272,17 @@ func (o *DACodecV1) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateChunkL1CommitBlobSize estimates the size of the L1 commit blob for a single chunk. -func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *Chunk) (uint64, error) { +func (o *DACodecV1) EstimateChunkL1CommitBlobSize(c *encoding.Chunk) (uint64, error) { metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) // over-estimate: adding metadata length chunkDataSize, err := o.chunkL1CommitBlobDataSize(c) if err != nil { return 0, err } - return CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + chunkDataSize), nil } // EstimateBatchL1CommitBlobSize estimates the total size of the L1 commit blob for a batch. 
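The padding applied by CalculatePaddedBlobSize comes from the 4844 encoding: each 32-byte field element can carry at most 31 payload bytes so the canonical value stays below the BLS modulus. A sketch under that assumption (the exact rounding in the real helper may differ):

	// paddedBlobSize estimates the blob bytes needed for size payload bytes,
	// assuming 31 usable bytes per 32-byte field element.
	func paddedBlobSize(size uint64) uint64 {
		fieldElements := (size + 30) / 31 // ceil(size / 31)
		return fieldElements * 32
	}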
-func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *Batch) (uint64, error) { +func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *encoding.Batch) (uint64, error) { metadataSize := uint64(2 + 4*Codecv1MaxNumChunks) var batchDataSize uint64 for _, c := range b.Chunks { @@ -257,10 +292,10 @@ func (o *DACodecV1) EstimateBatchL1CommitBlobSize(b *Batch) (uint64, error) { } batchDataSize += chunkDataSize } - return CalculatePaddedBlobSize(metadataSize + batchDataSize), nil + return encoding.CalculatePaddedBlobSize(metadataSize + batchDataSize), nil } -func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { +func (o *DACodecV1) chunkL1CommitBlobDataSize(c *encoding.Chunk) (uint64, error) { var dataSize uint64 for _, block := range c.Blocks { for _, tx := range block.Transactions { @@ -268,7 +303,7 @@ func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { continue } - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, false /* no mock */) + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, false /* no mock */) if err != nil { return 0, err } @@ -279,7 +314,7 @@ func (o *DACodecV1) chunkL1CommitBlobDataSize(c *Chunk) (uint64, error) { } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { +func (o *DACodecV1) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { var total uint64 var numL1Messages uint64 for _, txData := range b.Transactions { @@ -289,7 +324,7 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { } } - total += CalldataNonZeroByteGas * BlockContextByteSize + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize // sload total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue @@ -298,22 +333,22 @@ func (o *DACodecV1) EstimateBlockL1CommitGas(b *Block) (uint64, error) { total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue - total += GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy - total += 100 * numL1Messages // read admin in proxy - total += 100 * numL1Messages // read impl in proxy - total += 100 * numL1Messages // access impl - total += GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl return total, nil } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return uint64(BlockContextByteSize * len(c.Blocks)), nil +func (o *DACodecV1) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { + return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. 
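Summing the per-message terms in EstimateBlockL1CommitGas above gives a flat overhead per L1 message: with GetMemoryExpansionCost(36) = 6 under the standard EVM memory formula, each message costs 2100 + 100 + 100 + 6 + 100 + 100 + 100 + 6 = 2612 gas on top of the block-context calldata. A sketch of the memory helper assumed here (the real encoding.GetMemoryExpansionCost may additionally account for already-expanded memory):

	// memoryExpansionCost sketches the EVM memory gas for touching size bytes
	// from a fresh region: 3 gas per 32-byte word plus a quadratic term.
	func memoryExpansionCost(size uint64) uint64 {
		words := (size + 31) / 32
		return 3*words + words*words/512 // for size = 36: 3*2 + 4/512 = 6
	}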
-func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { +func (o *DACodecV1) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { var totalNonSkippedL1Messages uint64 var totalL1CommitGas uint64 for _, block := range c.Blocks { @@ -326,34 +361,34 @@ func (o *DACodecV1) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { } numBlocks := uint64(len(c.Blocks)) - totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload - totalL1CommitGas += CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata - totalL1CommitGas += GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { +func (o *DACodecV1) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { var totalL1CommitGas uint64 // Add extra gas costs - totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc - totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch - totalL1CommitGas += 20000 // 1 time sstore - totalL1CommitGas += 21000 // base fee for tx - totalL1CommitGas += CalldataNonZeroByteGas // version in calldata + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata // adjusting gas: // add 1 time cold sload (2100 gas) for L1MessageQueue // add 1 time cold address access (2600 gas) for L1MessageQueue // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) totalL1CommitGas += (2100 + 2600 - 100 - 100) - totalL1CommitGas += GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) - totalL1CommitGas += CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata // adjust batch data hash gas cost - totalL1CommitGas += GetKeccak256Gas(uint64(32 * len(b.Chunks))) + totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore @@ -367,8 +402,8 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk - totalL1CommitGas += CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) - totalL1CommitGas += GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 
32*(totalL1MessagePoppedInChunk+255)/256)
 
 		var totalL1CommitCalldataSize uint64
 		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
@@ -376,14 +411,14 @@ func (o *DACodecV1) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
 			return 0, err
 		}
 		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
-		totalL1CommitGas += GetMemoryExpansionCost(totalL1CommitCalldataSize)
+		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
 	}
 
 	return totalL1CommitGas, nil
 }
 
 // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately.
-func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) {
+func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) {
 	var totalL1CommitCalldataSize uint64
 	for _, chunk := range b.Chunks {
 		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
@@ -396,22 +431,22 @@ func (o *DACodecV1) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error)
 }
 
 // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk.
-func (o *DACodecV1) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) {
+func (o *DACodecV1) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) {
 	return true, nil
 }
 
 // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch.
-func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) {
+func (o *DACodecV1) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) {
 	return true, nil
 }
 
 // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk.
-func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) {
+func (o *DACodecV1) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) {
 	return 0, 0, nil
 }
 
 // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch.
-func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) {
+func (o *DACodecV1) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) {
 	return 0, 0, nil
 }
 
@@ -422,7 +457,7 @@ func (o *DACodecV1) SetCompression(enable bool) {}
 
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
// the latter is used in the public input to the provers.
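Put concretely, the body that follows reduces to a keccak over the concatenated chunk hashes:

	// dataHash = keccak256(chunkHash_0 || chunkHash_1 || ... || chunkHash_{n-1})
	var dataBytes []byte
	for _, chunkHash := range chunkHashes { // one common.Hash per DAChunk
		dataBytes = append(dataBytes, chunkHash.Bytes()...)
	}
	dataHash := crypto.Keccak256Hash(dataBytes)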
-func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { +func (o *DACodecV1) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { var dataBytes []byte totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore @@ -444,8 +479,35 @@ func (o *DACodecV1) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBe } // DecodeDAChunks takes a byte slice and decodes it into a []DAChunk -func (o *DACodecV1) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { - return (&DACodecV0{}).DecodeDAChunks(bytes) +func (o *DACodecV1) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) { + var chunks []encoding.DAChunk + for _, chunk := range bytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + } + + blocks := make([]encoding.DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + encoding.BlockContextByteSize + blocks[i] = &encoding.DABlockV0{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, encoding.NewDAChunkV1( + blocks, // blocks + nil, // transactions + )) + } + return chunks, nil } // JSONFromBytes for CodecV1 returns empty values. diff --git a/encoding/codecv2.go b/encoding/codecv2/codecv2.go similarity index 51% rename from encoding/codecv2.go rename to encoding/codecv2/codecv2.go index 4519fe0..3ce1cd4 100644 --- a/encoding/codecv2.go +++ b/encoding/codecv2/codecv2.go @@ -1,4 +1,4 @@ -package encoding +package codecv2 import ( "crypto/sha256" @@ -6,6 +6,7 @@ import ( "encoding/hex" "errors" "fmt" + "math" "math/big" "reflect" @@ -15,31 +16,91 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" + "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/zstd" ) type DACodecV2 struct{} +// init registers the DACodecV2 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV2, func() encoding.Codec { + return &DACodecV2{} + }) +} + // Codecv2MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv2MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV2) Version() CodecVersion { - return CodecV2 +func (o *DACodecV2) Version() encoding.CodecVersion { + return encoding.CodecV2 } // NewDABlock creates a new DABlock from the given encoding.Block and the total number of L1 messages popped before. 
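With every codec now registering itself in an init function, callers can select an implementation by version instead of importing concrete types. A hypothetical consumer follows; the lookup name CodecFromVersion is an assumption, so substitute whatever counterpart RegisterCodec actually exposes:

	import (
		"github.com/scroll-tech/da-codec/encoding"

		// blank imports run the init functions that register each codec
		_ "github.com/scroll-tech/da-codec/encoding/codecv1"
		_ "github.com/scroll-tech/da-codec/encoding/codecv2"
	)

	func codecFor(version encoding.CodecVersion) (encoding.Codec, error) {
		return encoding.CodecFromVersion(version) // hypothetical accessor
	}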
-func (o *DACodecV2) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - return (&DACodecV1{}).NewDABlock(block, totalL1MessagePoppedBefore) +func (o *DACodecV2) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := numL1Messages + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := encoding.NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages + ) + + return daBlock, nil } // NewDAChunk creates a new DAChunk from the given encoding.Chunk and the total number of L1 messages popped before. -func (o *DACodecV2) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - return (&DACodecV1{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) +func (o *DACodecV2) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { + if len(chunk.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(chunk.Blocks) > 255 { + return nil, errors.New("number of blocks exceeds 1 byte") + } + + var blocks []encoding.DABlock + var txs [][]*types.TransactionData + + for _, block := range chunk.Blocks { + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + blocks = append(blocks, b) + totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) + txs = append(txs, block.Transactions) + } + + daChunk := encoding.NewDAChunkV1( + blocks, // blocks + txs, // transactions + ) + + return daChunk, nil } // NewDABatch creates a DABatch from the provided encoding.Batch. 
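One subtlety in the chunk loop above: NumL1Messages is measured against the running queue position, so the counter must advance block by block. Upstream, the count is derived from the highest L1 queue index in the block, so if 4 messages were popped before a block whose highest queue index is 9, that block pops 9 - 4 + 1 = 6 messages, skipped ones included, and the next block is measured from 10:

	popped := uint64(4) // queue position before this chunk
	for _, block := range chunk.Blocks {
		popped += block.NumL1Messages(popped) // first block: 6; popped becomes 10
	}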
-func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { +func (o *DACodecV2) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv2MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -56,7 +117,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -67,9 +128,9 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { return nil, err } - daBatch := NewDABatchV1( - uint8(CodecV2), // version - batch.Index, // batchIndex + daBatch := encoding.NewDABatchV1( + uint8(encoding.CodecV2), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped dataHash, // dataHash @@ -85,7 +146,7 @@ func (o *DACodecV2) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { +func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { daBatch, err := o.NewDABatch(batch) if err != nil { return nil, err @@ -99,7 +160,7 @@ func (o *DACodecV2) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV2) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv2MaxNumChunks*4 @@ -128,7 +189,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -167,7 +228,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // Only apply this check when the uncompressed batch data has exceeded 128 KiB. if !useMockTxData && len(batchBytes) > 131072 { // Check compressed data compatibility. 
- if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -179,7 +240,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -196,7 +257,7 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -209,16 +270,16 @@ func (o *DACodecV2) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (o *DACodecV2) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { if len(data) < 121 { return nil, fmt.Errorf("insufficient data for DABatch, expected at least 121 bytes but got %d", len(data)) } - if CodecVersion(data[0]) != CodecV2 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV2) + if encoding.CodecVersion(data[0]) != encoding.CodecV2 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV2) } - b := NewDABatchV1( + b := encoding.NewDABatchV1( data[0], // version binary.BigEndian.Uint64(data[1:9]), // batchIndex binary.BigEndian.Uint64(data[9:17]), // l1MessagePopped @@ -235,8 +296,8 @@ func (o *DACodecV2) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) +func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -244,12 +305,12 @@ func (o *DACodecV2) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
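For reference, the offsets used by NewDABatchFromBytes above define the V1/V2 batch header layout: a fixed 121-byte prefix followed by a variable-length bitmap:

	// V1/V2 batch header layout (sizes in bytes, all integers big-endian):
	//
	//   offset  size  field
	//   0       1     version
	//   1       8     batchIndex
	//   9       8     l1MessagePopped
	//   17      8     totalL1MessagePopped
	//   25      32    dataHash
	//   57      32    blobVersionedHash
	//   89      32    parentBatchHash
	//   121     ...   skippedL1MessageBitmap (variable length)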
-func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) +func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return 0, 0, err } @@ -257,13 +318,13 @@ func (o *DACodecV2) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, if err != nil { return 0, 0, err } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(uint64(len(blobBytes))), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv2MaxNumChunks) +func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -275,7 +336,7 @@ func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error if len(batchBytes) <= 131072 { return true, nil } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -284,8 +345,8 @@ func (o *DACodecV2) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a batch payload, compresses the data, and checks the compressed data compatibility if the uncompressed data exceeds 128 KiB. -func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) +func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv2MaxNumChunks) if err != nil { return false, err } @@ -297,7 +358,7 @@ func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error if len(batchBytes) <= 131072 { return true, nil } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -305,28 +366,120 @@ func (o *DACodecV2) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. 
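These checks act as a pre-flight probe: a batcher can test whether the compressed form of a candidate batch would satisfy the decompression constraints before committing to it. A usage sketch; the splitting policy and sentinel error are illustrative, not part of this package:

	var errBatchTooLarge = errors.New("batch incompatible when compressed; split and retry")

	func ensureCompressible(codec *DACodecV2, batch *encoding.Batch) error {
		compatible, err := codec.CheckBatchCompressedDataCompatibility(batch)
		if err != nil {
			return err
		}
		if !compatible {
			return errBatchTooLarge // hypothetical policy: shrink the batch and retry
		}
		return nil
	}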
-func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return (&DACodecV1{}).EstimateChunkL1CommitCalldataSize(c) +func (o *DACodecV2) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { + return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - return (&DACodecV1{}).EstimateBatchL1CommitCalldataSize(b) +func (o *DACodecV2) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, chunk := range b.Chunks { + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil } // EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. -func (o *DACodecV2) EstimateBlockL1CommitGas(b *Block) (uint64, error) { - return (&DACodecV1{}).EstimateBlockL1CommitGas(b) +func (o *DACodecV2) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { + var total uint64 + var numL1Messages uint64 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + numL1Messages++ + continue + } + } + + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + + // sload + total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue + + // staticcall + total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue + total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue + + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + + return total, nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV2) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - return (&DACodecV1{}).EstimateChunkL1CommitGas(c) +func (o *DACodecV2) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { + var totalNonSkippedL1Messages uint64 + var totalL1CommitGas uint64 + for _, block := range c.Blocks { + totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } + totalL1CommitGas += blockL1CommitGas + } + + numBlocks := uint64(len(c.Blocks)) + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
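Because only block contexts reach calldata from codecv1 onward (transactions travel in the blob), the calldata estimate above is linear in block count alone. With the upstream BlockContextByteSize of 60 bytes (an assumption stated here, not shown in this hunk), a worked example:

	// illustrative: batch calldata is just the summed block contexts
	blocksPerChunk := []int{10, 7, 12}
	var calldataBytes int
	for _, n := range blocksPerChunk {
		calldataBytes += 60 * n // 60 == encoding.BlockContextByteSize (assumed)
	}
	// calldataBytes == 1740, independent of transaction count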
-func (o *DACodecV2) EstimateBatchL1CommitGas(b *Batch) (uint64, error) {
-	return (&DACodecV1{}).EstimateBatchL1CommitGas(b)
+func (o *DACodecV2) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
+	var totalL1CommitGas uint64
+
+	// Add extra gas costs
+	totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc
+	totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch
+	totalL1CommitGas += 20000 // 1 time sstore
+	totalL1CommitGas += 21000 // base fee for tx
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata
+
+	// adjusting gas:
+	// add 1 time cold sload (2100 gas) for L1MessageQueue
+	// add 1 time cold address access (2600 gas) for L1MessageQueue
+	// minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas)
+	totalL1CommitGas += (2100 + 2600 - 100 - 100)
+	totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap)
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
+
+	// adjust batch data hash gas cost
+	totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks)))
+
+	totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
+
+	for _, chunk := range b.Chunks {
+		chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitGas += chunkL1CommitGas
+
+		totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore)
+		totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk
+
+		totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
+		totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
+
+		var totalL1CommitCalldataSize uint64
+		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
+		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
+	}
+
+	return totalL1CommitGas, nil
 }
 
 // SetCompression enables or disables compression.
@@ -336,13 +489,57 @@ func (o *DACodecV2) SetCompression(enable bool) {}
 // Note: The batch hash and batch data hash are two different hashes,
 // the former is used for identifying a batch in the contracts,
 // the latter is used in the public input to the provers.
-func (o *DACodecV2) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
-	return (&DACodecV1{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore)
+func (o *DACodecV2) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	var dataBytes []byte
+	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
+
+	for _, chunk := range chunks {
+		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		if err != nil {
+			return common.Hash{}, err
+		}
+		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
+		chunkHash, err := daChunk.Hash()
+		if err != nil {
+			return common.Hash{}, err
+		}
+		dataBytes = append(dataBytes, chunkHash.Bytes()...)
+ } + + dataHash := crypto.Keccak256Hash(dataBytes) + return dataHash, nil } // DecodeDAChunks takes a byte slice and decodes it into a []DAChunk -func (o *DACodecV2) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { - return (&DACodecV1{}).DecodeDAChunks(bytes) +func (o *DACodecV2) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) { + var chunks []encoding.DAChunk + for _, chunk := range bytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + } + + blocks := make([]encoding.DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + encoding.BlockContextByteSize + blocks[i] = &encoding.DABlockV0{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, encoding.NewDAChunkV1( + blocks, // blocks + nil, // transactions + )) + } + return chunks, nil } // JSONFromBytes for CodecV1 returns empty values. diff --git a/encoding/codecv3.go b/encoding/codecv3.go deleted file mode 100644 index c348905..0000000 --- a/encoding/codecv3.go +++ /dev/null @@ -1,216 +0,0 @@ -package encoding - -import ( - "encoding/binary" - "encoding/json" - "errors" - "fmt" - "reflect" - - "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/crypto/kzg4844" -) - -type DACodecV3 struct{} - -// Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. -const Codecv3MaxNumChunks = 45 - -// Version returns the codec version. -func (o *DACodecV3) Version() CodecVersion { - return CodecV3 -} - -// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (o *DACodecV3) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - return (&DACodecV2{}).NewDABlock(block, totalL1MessagePoppedBefore) -} - -// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV3) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - return (&DACodecV2{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) -} - -// NewDABatch creates a DABatch from the provided Batch. 
-func (o *DACodecV3) NewDABatch(batch *Batch) (DABatch, error) { - // this encoding can only support a fixed number of chunks per batch - if len(batch.Chunks) > Codecv3MaxNumChunks { - return nil, errors.New("too many chunks in batch") - } - - if len(batch.Chunks) == 0 { - return nil, errors.New("too few chunks in batch") - } - - if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { - return nil, errors.New("too few blocks in last chunk of the batch") - } - - // batch data hash - dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) - if err != nil { - return nil, err - } - - // blob payload - blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) - if err != nil { - return nil, err - } - - lastChunk := batch.Chunks[len(batch.Chunks)-1] - lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - - return NewDABatchV2( - uint8(CodecV3), // version - batch.Index, // batchIndex - totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped - totalL1MessagePoppedAfter, // totalL1MessagePopped - lastBlock.Header.Time, // lastBlockTimestamp - dataHash, // dataHash - batch.ParentBatchHash, // parentBatchHash - blobVersionedHash, // blobVersionedHash - bitmapBytes, // skippedL1MessageBitmap - blob, // blob - z, // z - blobBytes, // blobBytes - ) -} - -// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. -// It also checks if the blob versioned hashes are as expected. -func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { - daBatch, err := o.NewDABatch(batch) - if err != nil { - return nil, err - } - - if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { - return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) - } - - return daBatch, nil -} - -// constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV3) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { - return (&DACodecV2{}).constructBlobPayload(chunks, useMockTxData) -} - -// NewDABatchFromBytes decodes the given byte slice into a DABatch. -// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. 
-func (o *DACodecV3) NewDABatchFromBytes(data []byte) (DABatch, error) { - if len(data) != 193 { - return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) - } - - if CodecVersion(data[0]) != CodecV3 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV3) - } - - b := NewDABatchV2WithProof( - data[0], // Version - binary.BigEndian.Uint64(data[1:9]), // BatchIndex - binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped - binary.BigEndian.Uint64(data[17:25]), // TotalL1MessagePopped - binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp - common.BytesToHash(data[25:57]), // DataHash - common.BytesToHash(data[89:121]), // ParentBatchHash - common.BytesToHash(data[57:89]), // BlobVersionedHash - nil, // skippedL1MessageBitmap - nil, // blob - nil, // z - nil, // blobBytes - [2]common.Hash{ // BlobDataProof - common.BytesToHash(data[129:161]), - common.BytesToHash(data[161:193]), - }, - ) - - return b, nil -} - -// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - return (&DACodecV2{}).EstimateChunkL1CommitBatchSizeAndBlobSize(c) -} - -// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. -func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - return (&DACodecV2{}).EstimateBatchL1CommitBatchSizeAndBlobSize(b) -} - -// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - return (&DACodecV2{}).CheckChunkCompressedDataCompatibility(c) -} - -// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - return (&DACodecV2{}).CheckBatchCompressedDataCompatibility(b) -} - -// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return (&DACodecV2{}).EstimateChunkL1CommitCalldataSize(c) -} - -// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. -func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - return (&DACodecV2{}).EstimateBatchL1CommitCalldataSize(b) -} - -// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV3) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - chunkL1CommitGas, err := (&DACodecV2{}).EstimateChunkL1CommitGas(c) - if err != nil { - return 0, err - } - return chunkL1CommitGas + 50000, nil // plus 50000 for the point-evaluation precompile call. -} - -// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. -func (o *DACodecV3) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { - batchL1CommitGas, err := (&DACodecV2{}).EstimateBatchL1CommitGas(b) - if err != nil { - return 0, err - } - return batchL1CommitGas + 50000, nil // plus 50000 for the point-evaluation precompile call. -} - -// SetCompression enables or disables compression. 
-func (o *DACodecV3) SetCompression(enable bool) {} - -// computeBatchDataHash computes the data hash of the batch. -// Note: The batch hash and batch data hash are two different hashes, -// the former is used for identifying a badge in the contracts, -// the latter is used in the public input to the provers. -func (o *DACodecV3) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return (&DACodecV2{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore) -} - -// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk -func (o *DACodecV3) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { - return (&DACodecV2{}).DecodeDAChunks(bytes) -} - -// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON. -func (o *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { - batch, err := o.NewDABatchFromBytes(data) - if err != nil { - return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) - } - - jsonBytes, err := json.Marshal(batch) - if err != nil { - return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) - } - - return jsonBytes, nil -} diff --git a/encoding/codecv3/codecv3.go b/encoding/codecv3/codecv3.go new file mode 100644 index 0000000..23a66b7 --- /dev/null +++ b/encoding/codecv3/codecv3.go @@ -0,0 +1,574 @@ +package codecv3 + +import ( + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "math" + "math/big" + "reflect" + + "github.com/scroll-tech/da-codec/encoding" + "github.com/scroll-tech/da-codec/encoding/zstd" + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/log" +) + +type DACodecV3 struct{} + +// init registers the DACodecV3 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV3, func() encoding.Codec { + return &DACodecV3{} + }) +} + +// Codecv3MaxNumChunks is the maximum number of chunks that a batch can contain. +const Codecv3MaxNumChunks = 45 + +// Version returns the codec version. +func (o *DACodecV3) Version() encoding.CodecVersion { + return encoding.CodecV3 +} + +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. +func (o *DACodecV3) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := numL1Messages + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := encoding.NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages + ) + + return daBlock, nil +} + +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
+func (o *DACodecV3) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { + if len(chunk.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(chunk.Blocks) > 255 { + return nil, errors.New("number of blocks exceeds 1 byte") + } + + var blocks []encoding.DABlock + var txs [][]*types.TransactionData + + for _, block := range chunk.Blocks { + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + blocks = append(blocks, b) + totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) + txs = append(txs, block.Transactions) + } + + daChunk := encoding.NewDAChunkV1( + blocks, // blocks + txs, // transactions + ) + + return daChunk, nil +} + +// NewDABatch creates a DABatch from the provided Batch. +func (o *DACodecV3) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { + // this encoding can only support a fixed number of chunks per batch + if len(batch.Chunks) > Codecv3MaxNumChunks { + return nil, errors.New("too many chunks in batch") + } + + if len(batch.Chunks) == 0 { + return nil, errors.New("too few chunks in batch") + } + + if len(batch.Chunks[len(batch.Chunks)-1].Blocks) == 0 { + return nil, errors.New("too few blocks in last chunk of the batch") + } + + // batch data hash + dataHash, err := o.computeBatchDataHash(batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + // skipped L1 messages bitmap + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + + // blob payload + blob, blobVersionedHash, z, blobBytes, err := o.constructBlobPayload(batch.Chunks, false /* no mock */) + if err != nil { + return nil, err + } + + lastChunk := batch.Chunks[len(batch.Chunks)-1] + lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] + + return encoding.NewDABatchV2( + uint8(encoding.CodecV3), // version + batch.Index, // batchIndex + totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped + totalL1MessagePoppedAfter, // totalL1MessagePopped + lastBlock.Header.Time, // lastBlockTimestamp + dataHash, // dataHash + batch.ParentBatchHash, // parentBatchHash + blobVersionedHash, // blobVersionedHash + bitmapBytes, // skippedL1MessageBitmap + blob, // blob + z, // z + blobBytes, // blobBytes + ) +} + +// NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. +// It also checks if the blob versioned hashes are as expected. +func (o *DACodecV3) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { + daBatch, err := o.NewDABatch(batch) + if err != nil { + return nil, err + } + + if !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { + return nil, fmt.Errorf("blob versioned hashes do not match. Expected: %v, Got: %v", hashes, daBatch.BlobVersionedHashes()) + } + + return daBatch, nil +} + +// constructBlobPayload constructs the 4844 blob payload. 
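The challenge preimage assembled in the body below has a fixed shape, which is what makes its padding loop necessary; condensed:

	// challenge preimage for Codecv3MaxNumChunks = 45, (1+45+1)*32 bytes total:
	//
	//   [0:32]            keccak256(blob metadata)
	//   [32+i*32 : ...]   keccak256(chunk i payload); unused chunk slots repeat
	//                     the last chunk's hash
	//   [(1+45)*32 : ...] blob versioned hash
	//
	// the challenge point is its digest reduced into the scalar field:
	challengeDigest := crypto.Keccak256Hash(challengePreimage)
	z := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus)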
+func (o *DACodecV3) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) {
+	// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
+	metadataLength := 2 + Codecv3MaxNumChunks*4
+
+	// batchBytes represents the raw (un-compressed and un-padded) blob payload
+	batchBytes := make([]byte, metadataLength)
+
+	// challenge digest preimage
+	// 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash
+	challengePreimage := make([]byte, (1+Codecv3MaxNumChunks+1)*32)
+
+	// the chunk data hash used for calculating the challenge preimage
+	var chunkDataHash common.Hash
+
+	// blob metadata: num_chunks
+	binary.BigEndian.PutUint16(batchBytes[0:], uint16(len(chunks)))
+
+	// encode blob metadata and L2 transactions,
+	// and simultaneously also build challenge preimage
+	for chunkID, chunk := range chunks {
+		currentChunkStartIndex := len(batchBytes)
+
+		for _, block := range chunk.Blocks {
+			for _, tx := range block.Transactions {
+				if tx.Type == types.L1MessageTxType {
+					continue
+				}
+
+				// encode L2 txs into blob payload
+				rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData)
+				if err != nil {
+					return nil, common.Hash{}, nil, nil, err
+				}
+				batchBytes = append(batchBytes, rlpTxData...)
+			}
+		}
+
+		// blob metadata: chunki_size
+		if chunkSize := len(batchBytes) - currentChunkStartIndex; chunkSize != 0 {
+			binary.BigEndian.PutUint32(batchBytes[2+4*chunkID:], uint32(chunkSize))
+		}
+
+		// challenge: compute chunk data hash
+		chunkDataHash = crypto.Keccak256Hash(batchBytes[currentChunkStartIndex:])
+		copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
+	}
+
+	// if we have fewer than Codecv3MaxNumChunks chunks, the rest
+	// of the blob metadata is correctly initialized to 0,
+	// but we need to add padding to the challenge preimage
+	for chunkID := len(chunks); chunkID < Codecv3MaxNumChunks; chunkID++ {
+		// use the last chunk's data hash as padding
+		copy(challengePreimage[32+chunkID*32:], chunkDataHash[:])
+	}
+
+	// challenge: compute metadata hash
+	hash := crypto.Keccak256Hash(batchBytes[0:metadataLength])
+	copy(challengePreimage[0:], hash[:])
+
+	// blobBytes represents the compressed blob payload (batchBytes)
+	blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes)
+	if err != nil {
+		return nil, common.Hash{}, nil, nil, err
+	}
+
+	// Only apply this check when the uncompressed batch data has exceeded 128 KiB.
+	if !useMockTxData && len(batchBytes) > 131072 {
+		// Check compressed data compatibility.
+ if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, err + } + } + + if len(blobBytes) > 126976 { + log.Error("constructBlobPayload: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, nil, errors.New("Blob payload exceeds maximum size") + } + + // convert raw data to BLSFieldElements + blob, err := encoding.MakeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, nil, err + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, nil, errors.New("failed to create blob commitment") + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + // challenge: append blob versioned hash + copy(challengePreimage[(1+Codecv3MaxNumChunks)*32:], blobVersionedHash[:]) + + // compute z = challenge_digest % BLS_MODULUS + challengeDigest := crypto.Keccak256Hash(challengePreimage) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) + pointBytes := pointBigInt.Bytes() + + // the challenge point z + var z kzg4844.Point + start := 32 - len(pointBytes) + copy(z[start:], pointBytes) + + return blob, blobVersionedHash, &z, blobBytes, nil +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. +// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. +func (o *DACodecV3) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { + if len(data) != 193 { + return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) + } + + if encoding.CodecVersion(data[0]) != encoding.CodecV3 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV3) + } + + b := encoding.NewDABatchV2WithProof( + data[0], // Version + binary.BigEndian.Uint64(data[1:9]), // BatchIndex + binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped + binary.BigEndian.Uint64(data[17:25]), // TotalL1MessagePopped + binary.BigEndian.Uint64(data[121:129]), // LastBlockTimestamp + common.BytesToHash(data[25:57]), // DataHash + common.BytesToHash(data[89:121]), // ParentBatchHash + common.BytesToHash(data[57:89]), // BlobVersionedHash + nil, // skippedL1MessageBitmap + nil, // blob + nil, // z + nil, // blobBytes + [2]common.Hash{ // BlobDataProof + common.BytesToHash(data[129:161]), + common.BytesToHash(data[161:193]), + }, + ) + + return b, nil +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. +func (o *DACodecV3) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv3MaxNumChunks) + if err != nil { + return 0, 0, err + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil +} + +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
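For reference, the fixed 193-byte V3 header decoded above, field by field; the skipped bitmap left the header in V3, which is why the length check is exact rather than a minimum:

	// V3 batch header layout (sizes in bytes, all integers big-endian):
	//
	//   offset  size  field
	//   0       1     version
	//   1       8     batchIndex
	//   9       8     l1MessagePopped
	//   17      8     totalL1MessagePopped
	//   25      32    dataHash
	//   57      32    blobVersionedHash
	//   89      32    parentBatchHash
	//   121     8     lastBlockTimestamp
	//   129     32    blobDataProof[0]
	//   161     32    blobDataProof[1]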
+func (o *DACodecV3) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) + if err != nil { + return 0, 0, err + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return 0, 0, err + } + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(uint64(len(blobBytes))), nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +func (o *DACodecV3) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv3MaxNumChunks) + if err != nil { + return false, err + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + // Only apply this check when the uncompressed batch data has exceeded 128 KiB. + if len(batchBytes) <= 131072 { + return true, nil + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func (o *DACodecV3) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv3MaxNumChunks) + if err != nil { + return false, err + } + blobBytes, err := zstd.CompressScrollBatchBytes(batchBytes) + if err != nil { + return false, err + } + // Only apply this check when the uncompressed batch data has exceeded 128 KiB. + if len(batchBytes) <= 131072 { + return true, nil + } + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { + log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return false, nil + } + return true, nil +} + +// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. +func (o *DACodecV3) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { + return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil +} + +// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. +func (o *DACodecV3) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, chunk := range b.Chunks { + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil +} + +// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. 
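A false return from the compatibility checks above is a partitioning signal, not a failure. A hypothetical helper (fitChunksForV3 is an illustrative name, not part of this patch) that shrinks a candidate batch until its compressed payload passes:

	// fitChunksForV3 returns the longest prefix of chunks whose compressed
	// payload passes the codecv3 compatibility check.
	func fitChunksForV3(codec *DACodecV3, chunks []*encoding.Chunk) ([]*encoding.Chunk, error) {
		for n := len(chunks); n > 0; n-- {
			ok, err := codec.CheckBatchCompressedDataCompatibility(&encoding.Batch{Chunks: chunks[:n]})
			if err != nil {
				return nil, err
			}
			if ok {
				return chunks[:n], nil
			}
		}
		return nil, errors.New("no compatible prefix of chunks")
	}

The gas estimators that follow then price whatever batch the caller settles on.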
+func (o *DACodecV3) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { + var total uint64 + var numL1Messages uint64 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + numL1Messages++ + continue + } + } + + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + + // sload + total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue + + // staticcall + total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue + total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue + + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + + return total, nil +} + +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. +func (o *DACodecV3) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { + var totalNonSkippedL1Messages uint64 + var totalL1CommitGas uint64 + for _, block := range c.Blocks { + totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } + totalL1CommitGas += blockL1CommitGas + } + + numBlocks := uint64(len(c.Blocks)) + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + + totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. + + return totalL1CommitGas, nil +} + +// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
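As a sanity check on EstimateBlockL1CommitGas above: assuming the repo's usual constants (CalldataNonZeroByteGas = 16 and BlockContextByteSize = 60) and the standard EVM memory-expansion rule, under which GetMemoryExpansionCost(36) = 6, each L1 message contributes 2100 + 100 + 100 + 6 + 100 + 100 + 100 + 6 = 2612 gas, so a block with two L1 messages is estimated at 16·60 + 2·2612 = 6184 gas. The estimate is dominated by the per-message cold sloads; the batch-level estimator below layers the fixed commit-transaction overheads on top of these per-chunk figures.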
+func (o *DACodecV3) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) {
+	var totalL1CommitGas uint64
+
+	// Add extra gas costs
+	totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc.
+	totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch
+	totalL1CommitGas += 20000 // 1 time sstore
+	totalL1CommitGas += 21000 // base fee for tx
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata
+
+	// adjusting gas:
+	// add 1 time cold sload (2100 gas) for L1MessageQueue
+	// add 1 time cold address access (2600 gas) for L1MessageQueue
+	// minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas)
+	totalL1CommitGas += (2100 + 2600 - 100 - 100)
+	totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part) + 32 (1 skippedL1MessageBitmap)
+	totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata
+
+	// adjust batch data hash gas cost
+	totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks)))
+
+	totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore
+
+	for _, chunk := range b.Chunks {
+		chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitGas += chunkL1CommitGas
+
+		totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore)
+		totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk
+
+		totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256)
+		totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256)
+
+		var totalL1CommitCalldataSize uint64
+		chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk)
+		if err != nil {
+			return 0, err
+		}
+		totalL1CommitCalldataSize += chunkL1CommitCalldataSize
+		totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize)
+	}
+
+	totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call.
+
+	return totalL1CommitGas, nil
+}
+
+// SetCompression is a no-op for codecv3: compression is always enabled.
+func (o *DACodecV3) SetCompression(enable bool) {}
+
+// computeBatchDataHash computes the data hash of the batch.
+// Note: The batch hash and batch data hash are two different hashes,
+// the former is used for identifying a batch in the contracts,
+// the latter is used in the public input to the provers.
+func (o *DACodecV3) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) {
+	var dataBytes []byte
+	totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore
+
+	for _, chunk := range chunks {
+		daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk)
+		if err != nil {
+			return common.Hash{}, err
+		}
+		totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk)
+		chunkHash, err := daChunk.Hash()
+		if err != nil {
+			return common.Hash{}, err
+		}
+		dataBytes = append(dataBytes, chunkHash.Bytes()...)
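+		// e.g. with two chunks, the loop accumulates hash(chunk0) || hash(chunk1),
+		// so the dataHash computed below is keccak256(hash(chunk0) || hash(chunk1))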
+ } + + dataHash := crypto.Keccak256Hash(dataBytes) + return dataHash, nil +} + +// DecodeDAChunks takes a byte slice and decodes it into a []DAChunk +func (o *DACodecV3) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) { + var chunks []encoding.DAChunk + for _, chunk := range bytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + } + + blocks := make([]encoding.DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + encoding.BlockContextByteSize + blocks[i] = &encoding.DABlockV0{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, encoding.NewDAChunkV1( + blocks, // blocks + nil, // transactions + )) + } + return chunks, nil +} + +// JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON. +func (o *DACodecV3) JSONFromBytes(data []byte) ([]byte, error) { + batch, err := o.NewDABatchFromBytes(data) + if err != nil { + return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) + } + + jsonBytes, err := json.Marshal(batch) + if err != nil { + return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) + } + + return jsonBytes, nil +} diff --git a/encoding/codecv4.go b/encoding/codecv4/codecv4.go similarity index 51% rename from encoding/codecv4.go rename to encoding/codecv4/codecv4.go index 9f77171..bffeb9c 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4/codecv4.go @@ -1,11 +1,13 @@ -package encoding +package codecv4 import ( "crypto/sha256" "encoding/binary" "encoding/hex" + "encoding/json" "errors" "fmt" + "math" "math/big" "reflect" "sync/atomic" @@ -16,6 +18,7 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" + "github.com/scroll-tech/da-codec/encoding" "github.com/scroll-tech/da-codec/encoding/zstd" ) @@ -23,26 +26,85 @@ type DACodecV4 struct { enableCompress uint32 } +// init registers the DACodecV4 with the encoding package. +func init() { + encoding.RegisterCodec(encoding.CodecV4, func() encoding.Codec { + return &DACodecV4{} + }) +} + // Codecv4MaxNumChunks is the maximum number of chunks that a batch can contain. const Codecv4MaxNumChunks = 45 // Version returns the codec version. -func (o *DACodecV4) Version() CodecVersion { - return CodecV4 +func (o *DACodecV4) Version() encoding.CodecVersion { + return encoding.CodecV4 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
-func (o *DACodecV4) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - return (&DACodecV3{}).NewDABlock(block, totalL1MessagePoppedBefore) +func (o *DACodecV4) NewDABlock(block *encoding.Block, totalL1MessagePoppedBefore uint64) (encoding.DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1Messages(totalL1MessagePoppedBefore) + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := numL1Messages + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := encoding.NewDABlockV0( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + uint16(numL1Messages), // numL1Messages + ) + + return daBlock, nil } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -func (o *DACodecV4) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { - return (&DACodecV3{}).NewDAChunk(chunk, totalL1MessagePoppedBefore) +func (o *DACodecV4) NewDAChunk(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64) (encoding.DAChunk, error) { + if len(chunk.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(chunk.Blocks) > 255 { + return nil, errors.New("number of blocks exceeds 1 byte") + } + + var blocks []encoding.DABlock + var txs [][]*types.TransactionData + + for _, block := range chunk.Blocks { + b, err := o.NewDABlock(block, totalL1MessagePoppedBefore) + if err != nil { + return nil, err + } + blocks = append(blocks, b) + totalL1MessagePoppedBefore += block.NumL1Messages(totalL1MessagePoppedBefore) + txs = append(txs, block.Transactions) + } + + daChunk := encoding.NewDAChunkV1( + blocks, // blocks + txs, // transactions + ) + + return daChunk, nil } // NewDABatch creates a DABatch from the provided Batch. 
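The bounds checked above mirror the wire format rather than arbitrary limits: numTransactions and numL1Messages are serialized as uint16 fields in the block context (hence the math.MaxUint16 checks), and a chunk stores its block count in a single leading byte (hence the 255 cap, and why DecodeDAChunks later reads numBlocks from chunk[0]).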
-func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { +func (o *DACodecV4) NewDABatch(batch *encoding.Batch) (encoding.DABatch, error) { // this encoding can only support a fixed number of chunks per batch if len(batch.Chunks) > Codecv4MaxNumChunks { return nil, errors.New("too many chunks in batch") @@ -63,7 +125,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { } // skipped L1 messages bitmap - bitmapBytes, totalL1MessagePoppedAfter, err := constructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) + bitmapBytes, totalL1MessagePoppedAfter, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore) if err != nil { return nil, err } @@ -77,9 +139,9 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { lastChunk := batch.Chunks[len(batch.Chunks)-1] lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1] - return NewDABatchV2( - uint8(CodecV3), // version - batch.Index, // batchIndex + return encoding.NewDABatchV2( + uint8(encoding.CodecV4), // version + batch.Index, // batchIndex totalL1MessagePoppedAfter-batch.TotalL1MessagePoppedBefore, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped lastBlock.Header.Time, // lastBlockTimestamp @@ -95,7 +157,7 @@ func (o *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { // NewDABatchWithExpectedBlobVersionedHashes creates a DABatch from the provided Batch. // It also checks if the blob versioned hashes are as expected. -func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hashes []common.Hash) (DABatch, error) { +func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *encoding.Batch, hashes []common.Hash) (encoding.DABatch, error) { o.SetCompression(true) daBatch, err := o.NewDABatch(batch) if err != nil || !reflect.DeepEqual(daBatch.BlobVersionedHashes(), hashes) { @@ -114,7 +176,7 @@ func (o *DACodecV4) NewDABatchWithExpectedBlobVersionedHashes(batch *Batch, hash } // constructBlobPayload constructs the 4844 blob payload. -func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { +func (o *DACodecV4) constructBlobPayload(chunks []*encoding.Chunk, useMockTxData bool) (*kzg4844.Blob, common.Hash, *kzg4844.Point, []byte, error) { // metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk) metadataLength := 2 + Codecv4MaxNumChunks*4 @@ -143,7 +205,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // encode L2 txs into blob payload - rlpTxData, err := ConvertTxDataToRLPEncoding(tx, useMockTxData) + rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx, useMockTxData) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -183,7 +245,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } if !useMockTxData { // Check compressed data compatibility. 
- if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Error("constructBlobPayload: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return nil, common.Hash{}, nil, nil, err } @@ -199,7 +261,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* } // convert raw data to BLSFieldElements - blob, err := MakeBlobCanonical(blobBytes) + blob, err := encoding.MakeBlobCanonical(blobBytes) if err != nil { return nil, common.Hash{}, nil, nil, err } @@ -216,7 +278,7 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // compute z = challenge_digest % BLS_MODULUS challengeDigest := crypto.Keccak256Hash(challengePreimage) - pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus) + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), encoding.BLSModulus) pointBytes := pointBigInt.Bytes() // the challenge point z @@ -229,16 +291,16 @@ func (o *DACodecV4) constructBlobPayload(chunks []*Chunk, useMockTxData bool) (* // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields empty. -func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { +func (o *DACodecV4) NewDABatchFromBytes(data []byte) (encoding.DABatch, error) { if len(data) != 193 { return nil, fmt.Errorf("invalid data length for DABatch, expected 193 bytes but got %d", len(data)) } - if CodecVersion(data[0]) != CodecV4 { - return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], CodecV4) + if encoding.CodecVersion(data[0]) != encoding.CodecV4 { + return nil, fmt.Errorf("invalid codec version: %d, expected: %d", data[0], encoding.CodecV4) } - b := NewDABatchV2WithProof( + b := encoding.NewDABatchV2WithProof( data[0], // Version binary.BigEndian.Uint64(data[1:9]), // BatchIndex binary.BigEndian.Uint64(data[9:17]), // L1MessagePopped @@ -261,8 +323,8 @@ func (o *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { } // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a single chunk. -func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) +func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *encoding.Chunk) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } @@ -276,12 +338,12 @@ func (o *DACodecV4) EstimateChunkL1CommitBatchSizeAndBlobSize(c *Chunk) (uint64, } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit uncompressed batch size and compressed blob size for a batch. 
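The `1 +` in blobBytesLength above accounts for the envelope byte that codecv4 prepends to the blob payload: a flag selecting compressed versus raw content, followed by either the zstd-compressed bytes or the raw batch bytes. A minimal sketch of that envelope (packV4Envelope is a hypothetical name; the flag values mirror the conditional-encode logic introduced earlier in this series):

	func packV4Envelope(batchBytes, compressedBytes []byte, compress bool) []byte {
		if compress {
			return append([]byte{1}, compressedBytes...) // 1 = compressed payload follows
		}
		return append([]byte{0}, batchBytes...) // 0 = raw payload follows
	}

The batch-level estimator below applies the same accounting.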
-func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, uint64, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) +func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *encoding.Batch) (uint64, uint64, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return 0, 0, err } @@ -295,12 +357,12 @@ func (o *DACodecV4) EstimateBatchL1CommitBatchSizeAndBlobSize(b *Batch) (uint64, } else { blobBytesLength = 1 + uint64(len(batchBytes)) } - return uint64(len(batchBytes)), CalculatePaddedBlobSize(blobBytesLength), nil + return uint64(len(batchBytes)), encoding.CalculatePaddedBlobSize(blobBytesLength), nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob([]*Chunk{c}, Codecv4MaxNumChunks) +func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *encoding.Chunk) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob([]*encoding.Chunk{c}, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -308,7 +370,7 @@ func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error if err != nil { return false, err } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckChunkCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -316,8 +378,8 @@ func (o *DACodecV4) CheckChunkCompressedDataCompatibility(c *Chunk) (bool, error } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - batchBytes, err := ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) +func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *encoding.Batch) (bool, error) { + batchBytes, err := encoding.ConstructBatchPayloadInBlob(b.Chunks, Codecv4MaxNumChunks) if err != nil { return false, err } @@ -325,7 +387,7 @@ func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error if err != nil { return false, err } - if err = CheckCompressedDataCompatibility(blobBytes); err != nil { + if err = encoding.CheckCompressedDataCompatibility(blobBytes); err != nil { log.Warn("CheckBatchCompressedDataCompatibility: compressed data compatibility check failed", "err", err, "batchBytes", hex.EncodeToString(batchBytes), "blobBytes", hex.EncodeToString(blobBytes)) return false, nil } @@ -333,23 +395,125 @@ func (o *DACodecV4) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error } // EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. -func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *Chunk) (uint64, error) { - return (&DACodecV3{}).EstimateChunkL1CommitCalldataSize(c) +func (o *DACodecV4) EstimateChunkL1CommitCalldataSize(c *encoding.Chunk) (uint64, error) { + return uint64(encoding.BlockContextByteSize * len(c.Blocks)), nil } // EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. 
-func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *Batch) (uint64, error) { - return (&DACodecV3{}).EstimateBatchL1CommitCalldataSize(b) +func (o *DACodecV4) EstimateBatchL1CommitCalldataSize(b *encoding.Batch) (uint64, error) { + var totalL1CommitCalldataSize uint64 + for _, chunk := range b.Chunks { + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + } + return totalL1CommitCalldataSize, nil +} + +// EstimateBlockL1CommitGas calculates the total L1 commit gas for this block approximately. +func (o *DACodecV4) EstimateBlockL1CommitGas(b *encoding.Block) (uint64, error) { + var total uint64 + var numL1Messages uint64 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + numL1Messages++ + continue + } + } + + total += encoding.CalldataNonZeroByteGas * encoding.BlockContextByteSize + + // sload + total += 2100 * numL1Messages // numL1Messages times cold sload in L1MessageQueue + + // staticcall + total += 100 * numL1Messages // numL1Messages times call to L1MessageQueue + total += 100 * numL1Messages // numL1Messages times warm address access to L1MessageQueue + + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // staticcall to proxy + total += 100 * numL1Messages // read admin in proxy + total += 100 * numL1Messages // read impl in proxy + total += 100 * numL1Messages // access impl + total += encoding.GetMemoryExpansionCost(36) * numL1Messages // delegatecall to impl + + return total, nil } // EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. -func (o *DACodecV4) EstimateChunkL1CommitGas(c *Chunk) (uint64, error) { - return (&DACodecV3{}).EstimateChunkL1CommitGas(c) +func (o *DACodecV4) EstimateChunkL1CommitGas(c *encoding.Chunk) (uint64, error) { + var totalNonSkippedL1Messages uint64 + var totalL1CommitGas uint64 + for _, block := range c.Blocks { + totalNonSkippedL1Messages += uint64(len(block.Transactions)) - block.NumL2Transactions() + blockL1CommitGas, err := o.EstimateBlockL1CommitGas(block) + if err != nil { + return 0, err + } + totalL1CommitGas += blockL1CommitGas + } + + numBlocks := uint64(len(c.Blocks)) + totalL1CommitGas += 100 * numBlocks // numBlocks times warm sload + totalL1CommitGas += encoding.CalldataNonZeroByteGas // numBlocks field of chunk encoding in calldata + + totalL1CommitGas += encoding.GetKeccak256Gas(58*numBlocks + 32*totalNonSkippedL1Messages) // chunk hash + + totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. + + return totalL1CommitGas, nil } // EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
-func (o *DACodecV4) EstimateBatchL1CommitGas(b *Batch) (uint64, error) { - return (&DACodecV3{}).EstimateBatchL1CommitGas(b) +func (o *DACodecV4) EstimateBatchL1CommitGas(b *encoding.Batch) (uint64, error) { + var totalL1CommitGas uint64 + + // Add extra gas costs + totalL1CommitGas += 100000 // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * 2100 // 4 one-time cold sload for commitBatch + totalL1CommitGas += 20000 // 1 time sstore + totalL1CommitGas += 21000 // base fee for tx + totalL1CommitGas += encoding.CalldataNonZeroByteGas // version in calldata + + // adjusting gas: + // add 1 time cold sload (2100 gas) for L1MessageQueue + // add 1 time cold address access (2600 gas) for L1MessageQueue + // minus 1 time warm sload (100 gas) & 1 time warm address access (100 gas) + totalL1CommitGas += (2100 + 2600 - 100 - 100) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32) // parent batch header hash, length is estimated as 89 (constant part)+ 32 (1 skippedL1MessageBitmap) + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (89 + 32) // parent batch header in calldata + + // adjust batch data hash gas cost + totalL1CommitGas += encoding.GetKeccak256Gas(uint64(32 * len(b.Chunks))) + + totalL1MessagePoppedBefore := b.TotalL1MessagePoppedBefore + + for _, chunk := range b.Chunks { + chunkL1CommitGas, err := o.EstimateChunkL1CommitGas(chunk) + if err != nil { + return 0, err + } + totalL1CommitGas += chunkL1CommitGas + + totalL1MessagePoppedInChunk := chunk.NumL1Messages(totalL1MessagePoppedBefore) + totalL1MessagePoppedBefore += totalL1MessagePoppedInChunk + + totalL1CommitGas += encoding.CalldataNonZeroByteGas * (32 * (totalL1MessagePoppedInChunk + 255) / 256) + totalL1CommitGas += encoding.GetKeccak256Gas(89 + 32*(totalL1MessagePoppedInChunk+255)/256) + + var totalL1CommitCalldataSize uint64 + chunkL1CommitCalldataSize, err := o.EstimateChunkL1CommitCalldataSize(chunk) + if err != nil { + return 0, err + } + totalL1CommitCalldataSize += chunkL1CommitCalldataSize + totalL1CommitGas += encoding.GetMemoryExpansionCost(totalL1CommitCalldataSize) + } + + totalL1CommitGas += 50000 // plus 50000 for the point-evaluation precompile call. + + return totalL1CommitGas, nil } // isCompressEnabled checks if compression is enabled. @@ -370,16 +534,70 @@ func (o *DACodecV4) SetCompression(enable bool) { // Note: The batch hash and batch data hash are two different hashes, // the former is used for identifying a badge in the contracts, // the latter is used in the public input to the provers. -func (o *DACodecV4) computeBatchDataHash(chunks []*Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { - return (&DACodecV3{}).computeBatchDataHash(chunks, totalL1MessagePoppedBefore) +func (o *DACodecV4) computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore uint64) (common.Hash, error) { + var dataBytes []byte + totalL1MessagePoppedBeforeChunk := totalL1MessagePoppedBefore + + for _, chunk := range chunks { + daChunk, err := o.NewDAChunk(chunk, totalL1MessagePoppedBeforeChunk) + if err != nil { + return common.Hash{}, err + } + totalL1MessagePoppedBeforeChunk += chunk.NumL1Messages(totalL1MessagePoppedBeforeChunk) + chunkHash, err := daChunk.Hash() + if err != nil { + return common.Hash{}, err + } + dataBytes = append(dataBytes, chunkHash.Bytes()...) 
+ } + + dataHash := crypto.Keccak256Hash(dataBytes) + return dataHash, nil } // DecodeDAChunks takes a byte slice and decodes it into a []DAChunk -func (o *DACodecV4) DecodeDAChunks(bytes [][]byte) ([]DAChunk, error) { - return (&DACodecV3{}).DecodeDAChunks(bytes) +func (o *DACodecV4) DecodeDAChunks(bytes [][]byte) ([]encoding.DAChunk, error) { + var chunks []encoding.DAChunk + for _, chunk := range bytes { + if len(chunk) < 1 { + return nil, fmt.Errorf("invalid chunk, length is less than 1") + } + + numBlocks := int(chunk[0]) + if len(chunk) < 1+numBlocks*encoding.BlockContextByteSize { + return nil, fmt.Errorf("chunk size doesn't match with numBlocks, byte length of chunk: %v, expected length: %v", len(chunk), 1+numBlocks*encoding.BlockContextByteSize) + } + + blocks := make([]encoding.DABlock, numBlocks) + for i := 0; i < numBlocks; i++ { + startIdx := 1 + i*encoding.BlockContextByteSize // add 1 to skip numBlocks byte + endIdx := startIdx + encoding.BlockContextByteSize + blocks[i] = &encoding.DABlockV0{} + err := blocks[i].Decode(chunk[startIdx:endIdx]) + if err != nil { + return nil, err + } + } + + chunks = append(chunks, encoding.NewDAChunkV1( + blocks, // blocks + nil, // transactions + )) + } + return chunks, nil } // JSONFromBytes converts the bytes to a DABatchV2 and then marshals it to JSON. func (o *DACodecV4) JSONFromBytes(data []byte) ([]byte, error) { - return (&DACodecV3{}).JSONFromBytes(data) + batch, err := o.NewDABatchFromBytes(data) + if err != nil { + return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) + } + + jsonBytes, err := json.Marshal(batch) + if err != nil { + return nil, fmt.Errorf("failed to marshal DABatch to JSON: %w", err) + } + + return jsonBytes, nil } diff --git a/encoding/da.go b/encoding/da.go index b634967..66f34f1 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -406,7 +406,8 @@ func GetMemoryExpansionCost(memoryByteSize uint64) uint64 { return memoryCost } -func getTxPayloadLength(txData *types.TransactionData) (uint64, error) { +// GetTxPayloadLength calculates the length of the transaction payload. +func GetTxPayloadLength(txData *types.TransactionData) (uint64, error) { rlpTxData, err := ConvertTxDataToRLPEncoding(txData, false /* no mock */) if err != nil { return 0, err diff --git a/encoding/interfaces.go b/encoding/interfaces.go index fd133e9..ad25c06 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -69,35 +69,42 @@ const ( CodecV4 ) -// CodecFromVersion returns the appropriate codec for the given version. -func CodecFromVersion(version CodecVersion) (Codec, error) { - switch version { - case CodecV0: - return &DACodecV0{}, nil - case CodecV1: - return &DACodecV1{}, nil - case CodecV2: - return &DACodecV2{}, nil - case CodecV3: - return &DACodecV3{}, nil - case CodecV4: - return &DACodecV4{}, nil - default: +// MyCodecGen is a map that stores codec generator functions for each version. +var MyCodecGen = make(map[CodecVersion]func() Codec) + +// RegisterCodec registers a codec generator function for a specific version. +func RegisterCodec(version CodecVersion, codecGenFunc func() Codec) { + MyCodecGen[version] = codecGenFunc +} + +// getCodec retrieves a Codec instance for the specified version. +// It returns an error if the version is not supported. 
+func getCodec(version CodecVersion) (Codec, error) { + codecGen, ok := MyCodecGen[version] + if !ok { return nil, fmt.Errorf("unsupported codec version: %d", version) } + return codecGen(), nil +} + +// CodecFromVersion returns the appropriate codec for the given version. +func CodecFromVersion(version CodecVersion) (Codec, error) { + return getCodec(version) } // CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. -func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { +func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) (Codec, error) { + var version CodecVersion if chainCfg.IsDarwinV2(startBlockTimestamp) { - return &DACodecV4{} + version = CodecV4 } else if chainCfg.IsDarwin(startBlockTimestamp) { - return &DACodecV3{} + version = CodecV3 } else if chainCfg.IsCurie(startBlockNumber) { - return &DACodecV2{} + version = CodecV2 } else if chainCfg.IsBernoulli(startBlockNumber) { - return &DACodecV1{} + version = CodecV1 } else { - return &DACodecV0{} + version = CodecV0 } + return getCodec(version) }
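With the registry in place, codec selection follows the database/sql driver pattern: each codec package registers itself in an init function, and a caller imports it for that side effect before resolving a codec by version or chain configuration. A minimal usage sketch (import paths assume this repo's layout and its go-ethereum fork; error handling is the caller's responsibility):

	package main

	import (
		"math/big"

		"github.com/scroll-tech/go-ethereum/params"

		"github.com/scroll-tech/da-codec/encoding"
		_ "github.com/scroll-tech/da-codec/encoding/codecv4" // blank import runs init, registering CodecV4
	)

	func newCodec(cfg *params.ChainConfig, blockNum *big.Int, blockTime uint64) (encoding.Codec, error) {
		// CodecFromConfig now returns an error instead of silently yielding
		// a codec, so unregistered versions fail loudly here.
		return encoding.CodecFromConfig(cfg, blockNum, blockTime)
	}

One corollary of the registry design: every codec version the chain config can select must be linked into the binary via such an import, otherwise getCodec returns an unsupported-version error at runtime.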