From 286f2092d4cd9fe8f9b84b38587312de6a148c88 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 27 Dec 2024 12:14:06 +0800 Subject: [PATCH 01/47] add initial CodecV6 and daBatchV6 --- encoding/codecv6.go | 77 ++++++++++++++++++++++ encoding/codecv6_types.go | 135 ++++++++++++++++++++++++++++++++++++++ encoding/da.go | 6 ++ encoding/interfaces.go | 8 ++- 4 files changed, 225 insertions(+), 1 deletion(-) create mode 100644 encoding/codecv6.go create mode 100644 encoding/codecv6_types.go diff --git a/encoding/codecv6.go b/encoding/codecv6.go new file mode 100644 index 0000000..1692abe --- /dev/null +++ b/encoding/codecv6.go @@ -0,0 +1,77 @@ +package encoding + +import ( + "encoding/json" + "fmt" + + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +type DACodecV6 struct { + DACodecV4 +} + +// Version returns the codec version. +func (d *DACodecV6) Version() CodecVersion { + return CodecV6 +} + +// MaxNumChunksPerBatch returns the maximum number of chunks per batch. +func (d *DACodecV6) MaxNumChunksPerBatch() int { + return 1 +} + +// NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. +// Note: For DACodecV6, this function is not implemented since there is no notion of DAChunk in this version. Blobs +// contain the entire batch data, and it is up to a prover to decide the chunk sizes. +func (d *DACodecV6) NewDAChunk(_ *Chunk, _ uint64) (DAChunk, error) { + return nil, nil +} + +// NewDABatch creates a DABatch including blob from the provided Batch. +func (d *DACodecV6) NewDABatch(batch *Batch) (DABatch, error) { + // TODO: create DABatch from the provided batch once the blob layout is defined. See DACodecV4 for reference. + return nil, nil +} + +// NewDABatchFromBytes decodes the given byte slice into a DABatch. +// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. 
+func (d *DACodecV6) NewDABatchFromBytes(data []byte) (DABatch, error) { + daBatch, err := decodeDABatchV6(data) + if err != nil { + return nil, fmt.Errorf("failed to decode DA batch: %w", err) + } + + if daBatch.version != CodecV6 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV6, daBatch.version) + } + + return daBatch, nil +} + +func (d *DACodecV6) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) { + return nil, nil +} + +func (d *DACodecV6) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + return nil +} + +// TODO: add DecodeBlob to interface to decode the blob and transactions or reuse DecodeTxsFromBlob but only have a single "chunk" for all transactions in the batch? + +// TODO: which of the Estimate* functions are needed? + +// JSONFromBytes converts the bytes to a DABatch and then marshals it to JSON. +func (d *DACodecV6) JSONFromBytes(data []byte) ([]byte, error) { + batch, err := d.NewDABatchFromBytes(data) + if err != nil { + return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) + } + + jsonBytes, err := json.Marshal(batch) + if err != nil { + return nil, fmt.Errorf("failed to marshal DABatch to JSON, version %d, hash %s: %w", batch.Version(), batch.Hash(), err) + } + + return jsonBytes, nil +} diff --git a/encoding/codecv6_types.go b/encoding/codecv6_types.go new file mode 100644 index 0000000..88bff8c --- /dev/null +++ b/encoding/codecv6_types.go @@ -0,0 +1,135 @@ +package encoding + +import ( + "encoding/binary" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/crypto" + "github.com/scroll-tech/go-ethereum/crypto/kzg4844" +) + +// daBatchV3 contains metadata about a batch of DAChunks. 
+type daBatchV6 struct { + version CodecVersion + batchIndex uint64 + parentBatchHash common.Hash + blobVersionedHash common.Hash + + blob *kzg4844.Blob + z *kzg4844.Point + blobBytes []byte +} + +// newDABatchV6 is a constructor for daBatchV6 that calls blobDataProofForPICircuit internally. +func newDABatchV6(version CodecVersion, batchIndex uint64, parentBatchHash, blobVersionedHash common.Hash, blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte) (*daBatchV6, error) { + daBatch := &daBatchV6{ + version: version, + batchIndex: batchIndex, + parentBatchHash: parentBatchHash, + blobVersionedHash: blobVersionedHash, + blob: blob, + z: z, + blobBytes: blobBytes, + } + + return daBatch, nil +} + +func decodeDABatchV6(data []byte) (*daBatchV6, error) { + if len(data) != daBatchV6EncodedLength { + return nil, fmt.Errorf("invalid data length for DABatchV6, expected %d bytes but got %d", daBatchV6EncodedLength, len(data)) + } + + version := CodecVersion(data[daBatchOffsetVersion]) + batchIndex := binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV6OffsetBlobVersionedHash]) + blobVersionedHash := common.BytesToHash(data[daBatchV6OffsetBlobVersionedHash:daBatchV6OffsetParentBatchHash]) + parentBatchHash := common.BytesToHash(data[daBatchV6OffsetParentBatchHash:daBatchV6EncodedLength]) + + return newDABatchV6(version, batchIndex, parentBatchHash, blobVersionedHash, nil, nil, nil) +} + +// Encode serializes the DABatchV3 into bytes. +func (b *daBatchV6) Encode() []byte { + batchBytes := make([]byte, daBatchV6EncodedLength) + batchBytes[daBatchOffsetVersion] = byte(b.version) + binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV6OffsetBlobVersionedHash], b.batchIndex) + copy(batchBytes[daBatchV6OffsetBlobVersionedHash:daBatchV6OffsetParentBatchHash], b.blobVersionedHash[:]) + copy(batchBytes[daBatchV6OffsetParentBatchHash:daBatchV6EncodedLength], b.parentBatchHash[:]) + return batchBytes +} + +// Hash computes the hash of the serialized DABatch. 
+func (b *daBatchV6) Hash() common.Hash { + return crypto.Keccak256Hash(b.Encode()) +} + +// BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +func (b *daBatchV6) BlobDataProofForPointEvaluation() ([]byte, error) { + if b.blob == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") + } + if b.z == nil { + return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") + } + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, fmt.Errorf("failed to create blob commitment: %w", err) + } + + proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) + } + + return blobDataProofFromValues(*b.z, y, commitment, proof), nil +} + +// Blob returns the blob of the batch. +func (b *daBatchV6) Blob() *kzg4844.Blob { + return b.blob +} + +// BlobBytes returns the blob bytes of the batch. +func (b *daBatchV6) BlobBytes() []byte { + return b.blobBytes +} + +// MarshalJSON implements the custom JSON serialization for daBatchV3. +// This method is designed to provide prover with batch info in snake_case format. +func (b *daBatchV6) MarshalJSON() ([]byte, error) { + type daBatchV6JSON struct { + Version CodecVersion `json:"version"` + BatchIndex uint64 `json:"batch_index"` + BlobVersionedHash string `json:"blob_versioned_hash"` + ParentBatchHash string `json:"parent_batch_hash"` + } + + return json.Marshal(&daBatchV6JSON{ + Version: b.version, + BatchIndex: b.batchIndex, + BlobVersionedHash: b.blobVersionedHash.Hex(), + ParentBatchHash: b.parentBatchHash.Hex(), + }) +} + +// Version returns the version of the DABatch. +func (b *daBatchV6) Version() CodecVersion { + return b.version +} + +// SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. +// For daBatchV6, there is no skipped L1 message bitmap. 
+func (b *daBatchV6) SkippedL1MessageBitmap() []byte { + return nil +} + +// DataHash returns the data hash of the DABatch. +// For daBatchV6, there is no data hash. +func (b *daBatchV6) DataHash() common.Hash { + return common.Hash{} +} diff --git a/encoding/da.go b/encoding/da.go index 480679d..8642936 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -77,6 +77,12 @@ const ( daBatchV3EncodedLength = 193 ) +const ( + daBatchV6EncodedLength = 73 + daBatchV6OffsetBlobVersionedHash = 9 + daBatchV6OffsetParentBatchHash = 41 +) + const ( payloadLengthBytes = 4 calldataNonZeroByteGas = 16 diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 6461a30..3c9d415 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -76,6 +76,8 @@ const ( CodecV2 CodecV3 CodecV4 + _ // CodecV5 is skipped + CodecV6 ) // CodecFromVersion returns the appropriate codec for the given version. @@ -91,6 +93,8 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { return &DACodecV3{}, nil case CodecV4: return &DACodecV4{}, nil + case CodecV6: + return &DACodecV6{}, nil default: return nil, fmt.Errorf("unsupported codec version: %v", version) } @@ -98,7 +102,9 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { // CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. 
func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { - if chainCfg.IsDarwinV2(startBlockTimestamp) { + if chainCfg.IsDarwinV2(startBlockTimestamp) { // TODO: replace with correct fork + return &DACodecV6{} + } else if chainCfg.IsDarwinV2(startBlockTimestamp) { return &DACodecV4{} } else if chainCfg.IsDarwin(startBlockTimestamp) { return &DACodecV3{} From 676784534011edf71928c432abec01d79a963480 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=96mer=20Faruk=20Irmak?= Date: Tue, 7 Jan 2025 15:58:42 +0300 Subject: [PATCH 02/47] feat: add codecv5 and codecv6 for Euclid fork --- encoding/codecv4.go | 10 +++++++--- encoding/codecv5.go | 19 +++++++++++++++++++ encoding/codecv6.go | 14 ++++++++++++++ encoding/da.go | 22 ++++++++++++++++++---- encoding/da_test.go | 7 +++++++ encoding/interfaces.go | 10 +++++++++- go.mod | 4 ++-- go.sum | 12 ++++++------ 8 files changed, 82 insertions(+), 16 deletions(-) create mode 100644 encoding/codecv5.go create mode 100644 encoding/codecv6.go diff --git a/encoding/codecv4.go b/encoding/codecv4.go index 4d51fe5..fb0cc64 100644 --- a/encoding/codecv4.go +++ b/encoding/codecv4.go @@ -20,10 +20,14 @@ import ( type DACodecV4 struct { DACodecV3 + forcedVersion *CodecVersion } // Version returns the codec version. 
func (d *DACodecV4) Version() CodecVersion { + if d.forcedVersion != nil { + return *d.forcedVersion + } return CodecV4 } @@ -90,7 +94,7 @@ func (d *DACodecV4) NewDABatch(batch *Batch) (DABatch, error) { l1MessagePopped := totalL1MessagePoppedAfter - batch.TotalL1MessagePoppedBefore return newDABatchV3( - CodecV4, // version + d.Version(), // version batch.Index, // batchIndex l1MessagePopped, // l1MessagePopped totalL1MessagePoppedAfter, // totalL1MessagePopped @@ -112,8 +116,8 @@ func (d *DACodecV4) NewDABatchFromBytes(data []byte) (DABatch, error) { return nil, fmt.Errorf("invalid data length for DABatch, expected %d bytes but got %d", daBatchV3EncodedLength, len(data)) } - if CodecVersion(data[daBatchOffsetVersion]) != CodecV4 { - return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV4, data[daBatchOffsetVersion]) + if CodecVersion(data[daBatchOffsetVersion]) != d.Version() { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", d.Version(), data[daBatchOffsetVersion]) } return newDABatchV3WithProof( diff --git a/encoding/codecv5.go b/encoding/codecv5.go new file mode 100644 index 0000000..91e2a59 --- /dev/null +++ b/encoding/codecv5.go @@ -0,0 +1,19 @@ +package encoding + +type DACodecV5 struct { + DACodecV4 +} + +func NewDACodecV5() *DACodecV5 { + v := CodecV5 + return &DACodecV5{ + DACodecV4: DACodecV4{ + forcedVersion: &v, + }, + } +} + +// MaxNumChunksPerBatch returns the maximum number of chunks per batch. 
+func (d *DACodecV5) MaxNumChunksPerBatch() int { + return 1 +} diff --git a/encoding/codecv6.go b/encoding/codecv6.go new file mode 100644 index 0000000..819752c --- /dev/null +++ b/encoding/codecv6.go @@ -0,0 +1,14 @@ +package encoding + +type DACodecV6 struct { + DACodecV4 +} + +func NewDACodecV6() *DACodecV6 { + v := CodecV6 + return &DACodecV6{ + DACodecV4: DACodecV4{ + forcedVersion: &v, + }, + } +} diff --git a/encoding/da.go b/encoding/da.go index 480679d..90bc979 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -5,6 +5,7 @@ import ( "encoding/binary" "fmt" "math/big" + "slices" "github.com/klauspost/compress/zstd" @@ -226,6 +227,15 @@ func (c *Chunk) CrcMax() (uint64, error) { // Map sub-circuit name to row count crc := make(map[string]uint64) + // if no blocks have row consumption, this is an euclid chunk + isEuclidChunk := slices.IndexFunc(c.Blocks, func(block *Block) bool { + return block.RowConsumption != nil + }) == -1 + + if isEuclidChunk { + return 0, nil + } + // Iterate over blocks, accumulate row consumption for _, block := range c.Blocks { if block.RowConsumption == nil { @@ -633,8 +643,10 @@ func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uin return "curie" } else if !config.IsDarwinV2(blockTimestamp) { return "darwin" - } else { + } else if !config.IsEuclid(blockTimestamp) { return "darwinV2" + } else { + return "euclid" } } @@ -649,8 +661,10 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin return CodecV2 } else if !config.IsDarwinV2(blockTimestamp) { return CodecV3 - } else { + } else if !config.IsEuclid(blockTimestamp) { return CodecV4 + } else { + return CodecV6 } } @@ -679,7 +693,7 @@ func GetChunkEnableCompression(codecVersion CodecVersion, chunk *Chunk) (bool, e return false, nil case CodecV2, CodecV3: return true, nil - case CodecV4: + case CodecV4, CodecV5, CodecV6: return CheckChunkCompressedDataCompatibility(chunk, codecVersion) default: return false, 
fmt.Errorf("unsupported codec version: %v", codecVersion) @@ -693,7 +707,7 @@ func GetBatchEnableCompression(codecVersion CodecVersion, batch *Batch) (bool, e return false, nil case CodecV2, CodecV3: return true, nil - case CodecV4: + case CodecV4, CodecV5, CodecV6: return CheckBatchCompressedDataCompatibility(batch, codecVersion) default: return false, fmt.Errorf("unsupported codec version: %v", codecVersion) diff --git a/encoding/da_test.go b/encoding/da_test.go index dbfbaf1..f301ed0 100644 --- a/encoding/da_test.go +++ b/encoding/da_test.go @@ -78,6 +78,13 @@ func TestUtilFunctions(t *testing.T) { assert.Equal(t, uint64(5), chunk3.NumTransactions()) assert.Equal(t, uint64(240000), chunk3.TotalGasUsed()) + // euclid chunk + chunk3.Blocks[0].RowConsumption = nil + chunk3.Blocks[1].RowConsumption = nil + crc3Max, err = chunk3.CrcMax() + assert.NoError(t, err) + assert.Equal(t, uint64(0), crc3Max) + // Test Batch methods assert.Equal(t, block6.Header.Root, batch.StateRoot()) assert.Equal(t, block6.WithdrawRoot, batch.WithdrawRoot()) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 6461a30..e845102 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -76,6 +76,8 @@ const ( CodecV2 CodecV3 CodecV4 + CodecV5 + CodecV6 ) // CodecFromVersion returns the appropriate codec for the given version. @@ -91,6 +93,10 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { return &DACodecV3{}, nil case CodecV4: return &DACodecV4{}, nil + case CodecV5: + return NewDACodecV5(), nil + case CodecV6: + return NewDACodecV6(), nil default: return nil, fmt.Errorf("unsupported codec version: %v", version) } @@ -98,7 +104,9 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { // CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. 
func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { - if chainCfg.IsDarwinV2(startBlockTimestamp) { + if chainCfg.IsEuclid(startBlockTimestamp) { + return NewDACodecV6() + } else if chainCfg.IsDarwinV2(startBlockTimestamp) { return &DACodecV4{} } else if chainCfg.IsDarwin(startBlockTimestamp) { return &DACodecV3{} diff --git a/go.mod b/go.mod index 80d5fc7..3b098f2 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( github.com/agiledragon/gomonkey/v2 v2.12.0 - github.com/scroll-tech/go-ethereum v1.10.14-0.20241210104312-bdf64cfb39dc + github.com/scroll-tech/go-ethereum v1.10.14-0.20250103082839-ea3ec93d8c1e github.com/stretchr/testify v1.9.0 ) @@ -31,7 +31,7 @@ require ( github.com/tklauser/go-sysconf v0.3.12 // indirect github.com/tklauser/numcpus v0.6.1 // indirect github.com/yusufpapurcu/wmi v1.2.3 // indirect - golang.org/x/crypto v0.17.0 // indirect + golang.org/x/crypto v0.21.0 // indirect golang.org/x/sync v0.6.0 // indirect golang.org/x/sys v0.21.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index ba37004..48b0e95 100644 --- a/go.sum +++ b/go.sum @@ -1,5 +1,5 @@ -github.com/VictoriaMetrics/fastcache v1.12.1 h1:i0mICQuojGDL3KblA7wUNlY5lOK6a4bwt3uRKnkZU40= -github.com/VictoriaMetrics/fastcache v1.12.1/go.mod h1:tX04vaqcNoQeGLD+ra5pU5sWkuxnzWhEzLwhP9w653o= +github.com/VictoriaMetrics/fastcache v1.12.2 h1:N0y9ASrJ0F6h0QaC3o6uJb3NIZ9VKLjCM7NQbSmF7WI= +github.com/VictoriaMetrics/fastcache v1.12.2/go.mod h1:AmC+Nzz1+3G2eCPapF6UcsnkThDcMsQicp4xDukwJYI= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= github.com/agiledragon/gomonkey/v2 v2.12.0 h1:ek0dYu9K1rSV+TgkW5LvNNPRWyDZVIxGMCFI6Pz9o38= github.com/agiledragon/gomonkey/v2 v2.12.0/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaWLA6jbbgxfB4w2iY= @@ -78,8 +78,8 @@ github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= github.com/rivo/uniseg v0.4.4/go.mod 
h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/scroll-tech/go-ethereum v1.10.14-0.20241210104312-bdf64cfb39dc h1:ofQxDFg5aW0ANJcEXt5RJy5lDWz8jdKwKcZhEqvDjx8= -github.com/scroll-tech/go-ethereum v1.10.14-0.20241210104312-bdf64cfb39dc/go.mod h1:xRDJvaNUe7lCU2fB+AqyS7gahar+dfJPrUJplfXF4dw= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250103082839-ea3ec93d8c1e h1:g8jtcGiHbjWYh/V7O245IDho3WfQT4CwEpBV+MhYDrg= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250103082839-ea3ec93d8c1e/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= @@ -100,8 +100,8 @@ github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFi github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= +golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/sync 
v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= From cc9561b1215c2d91e72ef50ef0efece2175f7e08 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 21 Jan 2025 09:45:47 +0800 Subject: [PATCH 03/47] implement blob encoding and decoding according to new blob layout --- encoding/codecv6.go | 243 +++++++++++++++++++++++++++++++++++-- encoding/codecv6_types.go | 248 ++++++++++++++++++++++++++++++++++---- encoding/da.go | 19 +++ 3 files changed, 479 insertions(+), 31 deletions(-) diff --git a/encoding/codecv6.go b/encoding/codecv6.go index 1692abe..204f5eb 100644 --- a/encoding/codecv6.go +++ b/encoding/codecv6.go @@ -1,15 +1,21 @@ package encoding import ( + "crypto/sha256" + "encoding/hex" "encoding/json" + "errors" "fmt" + "math" + "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" + "github.com/scroll-tech/go-ethereum/log" + + "github.com/scroll-tech/da-codec/encoding/zstd" ) -type DACodecV6 struct { - DACodecV4 -} +type DACodecV6 struct{} // Version returns the codec version. func (d *DACodecV6) Version() CodecVersion { @@ -21,6 +27,37 @@ func (d *DACodecV6) MaxNumChunksPerBatch() int { return 1 } +// NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
+func (d *DACodecV6) NewDABlock(block *Block, _ uint64) (DABlock, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + // note: numL1Messages includes skipped messages + numL1Messages := block.NumL1MessagesNoSkipping() + if numL1Messages > math.MaxUint16 { + return nil, errors.New("number of L1 messages exceeds max uint16") + } + + // note: numTransactions includes skipped messages + numL2Transactions := block.NumL2Transactions() + numTransactions := uint64(numL1Messages) + numL2Transactions + if numTransactions > math.MaxUint16 { + return nil, errors.New("number of transactions exceeds max uint16") + } + + daBlock := newDABlockV6( + block.Header.Number.Uint64(), // number + block.Header.Time, // timestamp + block.Header.BaseFee, // baseFee + block.Header.GasLimit, // gasLimit + uint16(numTransactions), // numTransactions + numL1Messages, // numL1Messages + ) + + return daBlock, nil +} + // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. // Note: For DACodecV6, this function is not implemented since there is no notion of DAChunk in this version. Blobs // contain the entire batch data, and it is up to a prover to decide the chunk sizes. @@ -30,8 +67,92 @@ func (d *DACodecV6) NewDAChunk(_ *Chunk, _ uint64) (DAChunk, error) { // NewDABatch creates a DABatch including blob from the provided Batch. func (d *DACodecV6) NewDABatch(batch *Batch) (DABatch, error) { - // TODO: create DABatch from the provided batch once the blob layout is defined. See DACodecV4 for reference. 
- return nil, nil + if len(batch.Chunks) != 0 { + return nil, errors.New("batch must not contain any chunks") + } + + if len(batch.Blocks) == 0 { + return nil, errors.New("batch must contain at least one block") + } + + blob, blobVersionedHash, blobBytes, err := d.constructBlob(batch) + if err != nil { + return nil, fmt.Errorf("failed to construct blob: %w", err) + } + + daBatch, err := newDABatchV6(CodecV6, batch.Index, batch.ParentBatchHash, blobVersionedHash, blob, blobBytes) + if err != nil { + return nil, fmt.Errorf("failed to construct DABatch: %w", err) + } + + return daBatch, nil +} + +func (d *DACodecV6) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []byte, error) { + enableCompression, err := d.CheckBatchCompressedDataCompatibility(batch) + if err != nil { + return nil, common.Hash{}, nil, fmt.Errorf("failed to check batch compressed data compatibility: %w", err) + } + + blobBytes := make([]byte, blobEnvelopeV7PayloadOffset) + blobBytes[blobEnvelopeV7VersionOffset] = uint8(CodecV6) + + payloadBytes, err := d.constructBlobPayload(batch) + if err != nil { + return nil, common.Hash{}, nil, fmt.Errorf("failed to construct blob payload: %w", err) + } + + if enableCompression { + // compressedPayloadBytes represents the compressed blob payload + compressedPayloadBytes, err := zstd.CompressScrollBatchBytes(payloadBytes) + if err != nil { + return nil, common.Hash{}, nil, fmt.Errorf("failed to compress blob payload: %w", err) + } + // Check compressed data compatibility. 
+ if err = checkCompressedDataCompatibility(compressedPayloadBytes); err != nil { + log.Error("ConstructBlob: compressed data compatibility check failed", "err", err, "payloadBytes", hex.EncodeToString(payloadBytes), "compressedPayloadBytes", hex.EncodeToString(compressedPayloadBytes)) + return nil, common.Hash{}, nil, err + } + blobBytes[blobEnvelopeV7CompressedFlagOffset] = 0x1 + payloadBytes = compressedPayloadBytes + } else { + blobBytes[blobEnvelopeV7CompressedFlagOffset] = 0x0 + } + + sizeSlice := encodeSize3Bytes(uint32(len(payloadBytes))) + copy(blobBytes[blobEnvelopeV7ByteSizeOffset:blobEnvelopeV7CompressedFlagOffset], sizeSlice) + blobBytes = append(blobBytes, payloadBytes...) + + if len(blobBytes) > maxEffectiveBlobBytes { + log.Error("ConstructBlob: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) + return nil, common.Hash{}, nil, errors.New("blob exceeds maximum size") + } + + // convert raw data to BLSFieldElements + blob, err := makeBlobCanonical(blobBytes) + if err != nil { + return nil, common.Hash{}, nil, fmt.Errorf("failed to convert blobBytes to canonical form: %w", err) + } + + // compute blob versioned hash + c, err := kzg4844.BlobToCommitment(blob) + if err != nil { + return nil, common.Hash{}, nil, fmt.Errorf("failed to create blob commitment: %w", err) + } + blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c) + + return blob, blobVersionedHash, blobBytes, nil +} + +func (d *DACodecV6) constructBlobPayload(batch *Batch) ([]byte, error) { + blobPayload := blobPayloadV6{ + initialL1MessageIndex: batch.InitialL1MessageIndex, + initialL1MessageQueueHash: batch.InitialL1MessageQueueHash, + lastL1MessageQueueHash: batch.LastL1MessageQueueHash, + blocks: batch.Blocks, + } + + return blobPayload.Encode() } // NewDABatchFromBytes decodes the given byte slice into a DABatch. 
@@ -49,18 +170,126 @@ func (d *DACodecV6) NewDABatchFromBytes(data []byte) (DABatch, error) { return daBatch, nil } -func (d *DACodecV6) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) { +func (d *DACodecV6) DecodeDAChunksRawTx(_ [][]byte) ([]*DAChunkRawTx, error) { return nil, nil } func (d *DACodecV6) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + rawBytes := bytesFromBlobCanonical(blob) + + // read the blob envelope header + version := rawBytes[blobEnvelopeV7VersionOffset] + if CodecVersion(version) != CodecV6 { + return fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV6, version) + } + + // read the data size + blobEnvelopeSize := decodeSize3Bytes(rawBytes[blobEnvelopeV7ByteSizeOffset:blobEnvelopeV7CompressedFlagOffset]) + if blobEnvelopeSize+blobEnvelopeV7PayloadOffset > uint32(len(rawBytes)) { + return fmt.Errorf("blob envelope size exceeds the raw data size: %d > %d", blobEnvelopeSize, len(rawBytes)) + } + + payloadBytes := rawBytes[blobEnvelopeV7PayloadOffset : blobEnvelopeV7PayloadOffset+blobEnvelopeSize] + + // read the compressed flag and decompress if needed + compressed := rawBytes[blobEnvelopeV7CompressedFlagOffset] + if compressed == 0x1 { + var err error + if payloadBytes, err = decompressV6Bytes(payloadBytes); err != nil { + return fmt.Errorf("failed to decompress blob payload: %w", err) + } + } + + // read the payload + payload, err := decodeBlobPayloadV6(payloadBytes) + if err != nil { + return fmt.Errorf("failed to decode blob payload: %w", err) + } + + chunks = append(chunks, &DAChunkRawTx{ + Blocks: payload.daBlocks, + Transactions: payload.transactions, + }) + return nil } -// TODO: add DecodeBlob to interface to decode the blob and transactions or reuse DecodeTxsFromBlob but only have a single "chunk" for all transactions in the batch? +// checkCompressedDataCompatibility checks the compressed data compatibility for a batch. 
+// It constructs a blob payload, compresses the data, and checks the compressed data compatibility. +func (d *DACodecV6) checkCompressedDataCompatibility(batch *Batch) (bool, error) { + payloadBytes, err := d.constructBlobPayload(batch) + if err != nil { + return false, fmt.Errorf("failed to construct blob payload: %w", err) + } + + compressedPayloadBytes, err := zstd.CompressScrollBatchBytes(payloadBytes) + if err != nil { + return false, fmt.Errorf("failed to compress blob payload: %w", err) + } + + if err = checkCompressedDataCompatibility(compressedPayloadBytes); err != nil { + log.Warn("Compressed data compatibility check failed", "err", err, "payloadBytes", hex.EncodeToString(payloadBytes), "compressedPayloadBytes", hex.EncodeToString(compressedPayloadBytes)) + return false, nil + } + + // check if compressed data is bigger or equal to the original data -> no need to compress + if len(compressedPayloadBytes) >= len(payloadBytes) { + log.Warn("Compressed data is bigger or equal to the original data", "payloadBytes", hex.EncodeToString(payloadBytes), "compressedPayloadBytes", hex.EncodeToString(compressedPayloadBytes)) + return false, nil + } + + return true, nil +} + +// CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. +// Note: For DACodecV6, this function is not implemented since there is no notion of DAChunk in this version. Blobs +// contain the entire batch data, and it is up to a prover to decide the chunk sizes. +func (d *DACodecV6) CheckChunkCompressedDataCompatibility(_ *Chunk) (bool, error) { + return true, nil +} + +// CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. +func (d *DACodecV6) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + return d.checkCompressedDataCompatibility(b) +} // TODO: which of the Estimate* functions are needed? 
+func (d *DACodecV6) EstimateChunkL1CommitBatchSizeAndBlobSize(chunk *Chunk) (uint64, uint64, error) { + //TODO implement me after contracts are implemented + panic("implement me") +} + +func (d *DACodecV6) EstimateBatchL1CommitBatchSizeAndBlobSize(batch *Batch) (uint64, uint64, error) { + //TODO implement me after contracts are implemented + panic("implement me") +} + +func (d *DACodecV6) EstimateBlockL1CommitCalldataSize(block *Block) (uint64, error) { + //TODO implement me after contracts are implemented + panic("implement me") +} + +func (d *DACodecV6) EstimateChunkL1CommitCalldataSize(chunk *Chunk) (uint64, error) { + //TODO implement me after contracts are implemented + panic("implement me") +} + +func (d *DACodecV6) EstimateChunkL1CommitGas(chunk *Chunk) (uint64, error) { + //TODO implement me after contracts are implemented + panic("implement me") +} + +func (d *DACodecV6) EstimateBatchL1CommitGas(batch *Batch) (uint64, error) { + //TODO implement me after contracts are implemented + panic("implement me") +} + +func (d *DACodecV6) EstimateBatchL1CommitCalldataSize(batch *Batch) (uint64, error) { + //TODO implement me after contracts are implemented + panic("implement me") +} + // JSONFromBytes converts the bytes to a DABatch and then marshals it to JSON. 
func (d *DACodecV6) JSONFromBytes(data []byte) ([]byte, error) { batch, err := d.NewDABatchFromBytes(data) diff --git a/encoding/codecv6_types.go b/encoding/codecv6_types.go index 88bff8c..30e9505 100644 --- a/encoding/codecv6_types.go +++ b/encoding/codecv6_types.go @@ -1,17 +1,45 @@ package encoding import ( + "bytes" "encoding/binary" - "encoding/hex" "encoding/json" - "errors" "fmt" + "math/big" + "github.com/klauspost/compress/zstd" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) +const ( + blobEnvelopeV7VersionOffset = 0 + blobEnvelopeV7ByteSizeOffset = 1 + blobEnvelopeV7CompressedFlagOffset = 4 + blobEnvelopeV7PayloadOffset = 5 +) + +const ( + blobPayloadV6EncodedLength = 8 + 2*common.HashLength + 8 + 2 + blobPayloadV6OffsetInitialL1MessageIndex = 0 + blobPayloadV6OffsetInitialL1MessageQueue = blobPayloadV6OffsetInitialL1MessageIndex + 8 + blobPayloadV6OffsetLastL1MessageQueue = blobPayloadV6OffsetInitialL1MessageQueue + common.HashLength + blobPayloadV6OffsetInitialL2BlockNumber = blobPayloadV6OffsetLastL1MessageQueue + common.HashLength + blobPayloadV6OffsetNumBlocks = blobPayloadV6OffsetInitialL2BlockNumber + 8 + blobPayloadV6OffsetBlocks = blobPayloadV6OffsetNumBlocks + 2 +) + +const ( + daBlockV6BlockContextByteSize = 52 + daBlockV6OffsetTimestamp = 0 + daBlockV6OffsetBaseFee = daBlockV6OffsetTimestamp + 8 + daBlockV6OffsetGasLimit = daBlockV6OffsetBaseFee + 32 + daBlockV6numTransactionsOffset = daBlockV6OffsetGasLimit + 8 + daBlockV6numL1MessagesOffset = daBlockV6numTransactionsOffset + 2 +) + // daBatchV3 contains metadata about a batch of DAChunks. 
type daBatchV6 struct { version CodecVersion @@ -20,19 +48,17 @@ type daBatchV6 struct { blobVersionedHash common.Hash blob *kzg4844.Blob - z *kzg4844.Point blobBytes []byte } // newDABatchV6 is a constructor for daBatchV6 that calls blobDataProofForPICircuit internally. -func newDABatchV6(version CodecVersion, batchIndex uint64, parentBatchHash, blobVersionedHash common.Hash, blob *kzg4844.Blob, z *kzg4844.Point, blobBytes []byte) (*daBatchV6, error) { +func newDABatchV6(version CodecVersion, batchIndex uint64, parentBatchHash, blobVersionedHash common.Hash, blob *kzg4844.Blob, blobBytes []byte) (*daBatchV6, error) { daBatch := &daBatchV6{ version: version, batchIndex: batchIndex, parentBatchHash: parentBatchHash, blobVersionedHash: blobVersionedHash, blob: blob, - z: z, blobBytes: blobBytes, } @@ -49,7 +75,7 @@ func decodeDABatchV6(data []byte) (*daBatchV6, error) { blobVersionedHash := common.BytesToHash(data[daBatchV6OffsetBlobVersionedHash:daBatchV6OffsetParentBatchHash]) parentBatchHash := common.BytesToHash(data[daBatchV6OffsetParentBatchHash:daBatchV6EncodedLength]) - return newDABatchV6(version, batchIndex, parentBatchHash, blobVersionedHash, nil, nil, nil) + return newDABatchV6(version, batchIndex, parentBatchHash, blobVersionedHash, nil, nil) } // Encode serializes the DABatchV3 into bytes. @@ -68,25 +94,9 @@ func (b *daBatchV6) Hash() common.Hash { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. +// Note: This method is not implemented for daBatchV6. 
func (b *daBatchV6) BlobDataProofForPointEvaluation() ([]byte, error) { - if b.blob == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty blob") - } - if b.z == nil { - return nil, errors.New("called BlobDataProofForPointEvaluation with empty z") - } - - commitment, err := kzg4844.BlobToCommitment(b.blob) - if err != nil { - return nil, fmt.Errorf("failed to create blob commitment: %w", err) - } - - proof, y, err := kzg4844.ComputeProof(b.blob, *b.z) - if err != nil { - return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(b.z[:])) - } - - return blobDataProofFromValues(*b.z, y, commitment, proof), nil + return nil, nil } // Blob returns the blob of the batch. @@ -133,3 +143,193 @@ func (b *daBatchV6) SkippedL1MessageBitmap() []byte { func (b *daBatchV6) DataHash() common.Hash { return common.Hash{} } + +type blobPayloadV6 struct { + initialL1MessageIndex uint64 + initialL1MessageQueueHash common.Hash + lastL1MessageQueueHash common.Hash + + // used for encoding + blocks []*Block + + // used for decoding + daBlocks []DABlock + transactions []types.Transactions +} + +func (b *blobPayloadV6) Encode() ([]byte, error) { + payloadBytes := make([]byte, blobPayloadV6EncodedLength) + + binary.BigEndian.PutUint64(payloadBytes[blobPayloadV6OffsetInitialL1MessageIndex:blobPayloadV6OffsetInitialL1MessageQueue], b.initialL1MessageIndex) + copy(payloadBytes[blobPayloadV6OffsetInitialL1MessageQueue:blobPayloadV6OffsetLastL1MessageQueue], b.initialL1MessageQueueHash[:]) + copy(payloadBytes[blobPayloadV6OffsetLastL1MessageQueue:blobPayloadV6OffsetInitialL2BlockNumber], b.lastL1MessageQueueHash[:]) + + blockNumber := b.blocks[0].Header.Number.Uint64() + binary.BigEndian.PutUint64(payloadBytes[blobPayloadV6OffsetInitialL2BlockNumber:blobPayloadV6OffsetNumBlocks], blockNumber) + binary.BigEndian.PutUint16(payloadBytes[blobPayloadV6OffsetNumBlocks:blobPayloadV6OffsetBlocks], uint16(len(b.blocks))) + + var 
transactionBytes []byte + for _, block := range b.blocks { + daBlock := newDABlockV6(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), block.NumL1MessagesNoSkipping()) + payloadBytes = append(payloadBytes, daBlock.Encode()...) + + // encode L2 txs as RLP and append to transactionBytes + for _, tx := range block.Transactions { + if tx.Type == types.L1MessageTxType { + continue + } + + rlpTxData, err := convertTxDataToRLPEncoding(tx) + if err != nil { + return nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) + } + transactionBytes = append(transactionBytes, rlpTxData...) + } + } + payloadBytes = append(payloadBytes, transactionBytes...) + + return payloadBytes, nil +} + +func decodeBlobPayloadV6(data []byte) (*blobPayloadV6, error) { + if len(data) < blobPayloadV6EncodedLength { + return nil, fmt.Errorf("invalid data length for blobPayloadV6, expected at least %d bytes but got %d", blobPayloadV6EncodedLength, len(data)) + } + + initialL1MessageIndex := binary.BigEndian.Uint64(data[blobPayloadV6OffsetInitialL1MessageIndex:blobPayloadV6OffsetInitialL1MessageQueue]) + initialL1MessageQueueHash := common.BytesToHash(data[blobPayloadV6OffsetInitialL1MessageQueue:blobPayloadV6OffsetLastL1MessageQueue]) + lastL1MessageQueueHash := common.BytesToHash(data[blobPayloadV6OffsetLastL1MessageQueue:blobPayloadV6OffsetInitialL2BlockNumber]) + + initialL2BlockNumber := binary.BigEndian.Uint64(data[blobPayloadV6OffsetInitialL2BlockNumber:blobPayloadV6OffsetNumBlocks]) + numBlocks := int(binary.BigEndian.Uint16(data[blobPayloadV6OffsetNumBlocks:blobPayloadV6OffsetBlocks])) + + // decode DA Blocks from the blob + daBlocks := make([]DABlock, numBlocks) + for i := uint64(0); i < uint64(numBlocks); i++ { + daBlock := newDABlockV6WithNumber(initialL2BlockNumber + i) + + startBytes := blobPayloadV6OffsetBlocks + i*daBlockV6BlockContextByteSize + endBytes := startBytes + 
daBlockV6BlockContextByteSize + if err := daBlock.Decode(data[startBytes:endBytes]); err != nil { + return nil, fmt.Errorf("failed to decode DA block: %w", err) + } + + daBlocks = append(daBlocks, daBlock) + } + + // decode transactions for each block from the blob + txBytes := data[blobPayloadV6OffsetBlocks+daBlockV6BlockContextByteSize*numBlocks:] + curIndex := 0 + var transactions []types.Transactions + + for _, daBlock := range daBlocks { + var blockTransactions types.Transactions + txNum := int(daBlock.NumTransactions()) - int(daBlock.NumL1Messages()) + if txNum < 0 { + return nil, fmt.Errorf("invalid transaction count: NumL1Messages (%d) exceeds NumTransactions (%d)", daBlock.NumL1Messages(), daBlock.NumTransactions()) + } + + for i := 0; i < txNum; i++ { + tx, nextIndex, err := getNextTx(txBytes, curIndex) + if err != nil { + return nil, fmt.Errorf("couldn't decode next tx from blob bytes: %w, index: %d", err, curIndex+4) + } + curIndex = nextIndex + blockTransactions = append(blockTransactions, tx) + } + + transactions = append(transactions, blockTransactions) + } + + return &blobPayloadV6{ + initialL1MessageIndex: initialL1MessageIndex, + initialL1MessageQueueHash: initialL1MessageQueueHash, + lastL1MessageQueueHash: lastL1MessageQueueHash, + daBlocks: daBlocks, + transactions: transactions, + }, nil +} + +type daBlockV6 struct { + daBlockV0 +} + +// newDABlockV6 is a constructor function for daBlockV6 that initializes the internal fields. 
+func newDABlockV6(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *daBlockV6 { + return &daBlockV6{ + daBlockV0: daBlockV0{ + number: number, + timestamp: timestamp, + baseFee: baseFee, + gasLimit: gasLimit, + numTransactions: numTransactions, + numL1Messages: numL1Messages, + }, + } +} + +func newDABlockV6WithNumber(number uint64) *daBlockV6 { + return &daBlockV6{ + daBlockV0: daBlockV0{ + number: number, + }, + } +} + +// Encode serializes the DABlock into a slice of bytes. +func (b *daBlockV6) Encode() []byte { + daBlockBytes := make([]byte, daBlockV6BlockContextByteSize) + binary.BigEndian.PutUint64(daBlockBytes[daBlockV6OffsetTimestamp:daBlockV6OffsetBaseFee], b.timestamp) + if b.baseFee != nil { + b.baseFee.FillBytes(daBlockBytes[daBlockV6OffsetBaseFee:daBlockV6OffsetGasLimit]) + } + binary.BigEndian.PutUint64(daBlockBytes[daBlockV6OffsetGasLimit:daBlockV6numTransactionsOffset], b.gasLimit) + binary.BigEndian.PutUint16(daBlockBytes[daBlockV6numTransactionsOffset:daBlockV6numL1MessagesOffset], b.numTransactions) + binary.BigEndian.PutUint16(daBlockBytes[daBlockV6numL1MessagesOffset:], b.numL1Messages) + return daBlockBytes +} + +// Decode populates the fields of a DABlock from a byte slice. 
+func (b *daBlockV6) Decode(data []byte) error { + if len(data) != daBlockV6BlockContextByteSize { + return fmt.Errorf("block encoding is not blockContextByteSize bytes long expected %d, got %d", daBlockV6BlockContextByteSize, len(data)) + } + + b.timestamp = binary.BigEndian.Uint64(data[daBlockV6OffsetTimestamp:daBlockV6OffsetBaseFee]) + b.baseFee = new(big.Int).SetBytes(data[daBlockV6OffsetBaseFee:daBlockV6OffsetGasLimit]) + b.gasLimit = binary.BigEndian.Uint64(data[daBlockV6OffsetGasLimit:daBlockV6numTransactionsOffset]) + b.numTransactions = binary.BigEndian.Uint16(data[daBlockV6numTransactionsOffset:daBlockV6numL1MessagesOffset]) + b.numL1Messages = binary.BigEndian.Uint16(data[daBlockV6numL1MessagesOffset:]) + + return nil +} + +// decompressV6Bytes decompresses the given blob bytes into the original payload bytes. +func decompressV6Bytes(compressedBytes []byte) ([]byte, error) { + var res []byte + + r := bytes.NewReader(compressedBytes) + zr, err := zstd.NewReader(r) + if err != nil { + return nil, fmt.Errorf("failed to create zstd reader: %w", err) + } + defer zr.Close() + + res, err = zr.DecodeAll(compressedBytes, res) + if err != nil { + return nil, fmt.Errorf("failed to decompress zstd data: %w", err) + } + if len(res) == 0 { + return nil, fmt.Errorf("payload is empty after decompression") + } + + return res, nil +} + +func decodeSize3Bytes(data []byte) uint32 { + return uint32(data[0]) | uint32(data[1])<<8 | uint32(data[2])<<16 +} + +func encodeSize3Bytes(data uint32) []byte { + return []byte{byte(data), byte(data >> 8), byte(data >> 16)} +} diff --git a/encoding/da.go b/encoding/da.go index 8642936..0cd4eed 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -118,6 +118,12 @@ type Batch struct { TotalL1MessagePoppedBefore uint64 ParentBatchHash common.Hash Chunks []*Chunk + + // CodecV7 + InitialL1MessageIndex uint64 + InitialL1MessageQueueHash common.Hash + LastL1MessageQueueHash common.Hash + Blocks []*Block } // NumL1Messages returns the number of 
L1 messages in this block. @@ -137,6 +143,19 @@ func (b *Block) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 { return *lastQueueIndex - totalL1MessagePoppedBefore + 1 } +// NumL1MessagesNoSkipping returns the number of L1 messages in this block. +// This method assumes that L1 messages can't be skipped. +func (b *Block) NumL1MessagesNoSkipping() uint16 { + var count uint16 + for _, txData := range b.Transactions { + if txData.Type == types.L1MessageTxType { + count++ + } + } + + return count +} + // NumL2Transactions returns the number of L2 transactions in this block. func (b *Block) NumL2Transactions() uint64 { var count uint64 From 8c2a5ccc1b2e7de5efa9aa94f3f48ab78f6c4eef Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 21 Jan 2025 13:04:19 +0800 Subject: [PATCH 04/47] rename to CodecV7 --- encoding/{codecv6.go => codecv7.go} | 72 +++---- .../{codecv6_types.go => codecv7_types.go} | 180 +++++++++--------- encoding/da.go | 6 - encoding/interfaces.go | 11 +- 4 files changed, 135 insertions(+), 134 deletions(-) rename encoding/{codecv6.go => codecv7.go} (82%) rename encoding/{codecv6_types.go => codecv7_types.go} (53%) diff --git a/encoding/codecv6.go b/encoding/codecv7.go similarity index 82% rename from encoding/codecv6.go rename to encoding/codecv7.go index 204f5eb..dddca68 100644 --- a/encoding/codecv6.go +++ b/encoding/codecv7.go @@ -15,20 +15,20 @@ import ( "github.com/scroll-tech/da-codec/encoding/zstd" ) -type DACodecV6 struct{} +type DACodecV7 struct{} // Version returns the codec version. -func (d *DACodecV6) Version() CodecVersion { - return CodecV6 +func (d *DACodecV7) Version() CodecVersion { + return CodecV7 } // MaxNumChunksPerBatch returns the maximum number of chunks per batch. 
-func (d *DACodecV6) MaxNumChunksPerBatch() int { +func (d *DACodecV7) MaxNumChunksPerBatch() int { return 1 } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (d *DACodecV6) NewDABlock(block *Block, _ uint64) (DABlock, error) { +func (d *DACodecV7) NewDABlock(block *Block, _ uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } @@ -46,7 +46,7 @@ func (d *DACodecV6) NewDABlock(block *Block, _ uint64) (DABlock, error) { return nil, errors.New("number of transactions exceeds max uint16") } - daBlock := newDABlockV6( + daBlock := newDABlockV7( block.Header.Number.Uint64(), // number block.Header.Time, // timestamp block.Header.BaseFee, // baseFee @@ -59,14 +59,14 @@ func (d *DACodecV6) NewDABlock(block *Block, _ uint64) (DABlock, error) { } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -// Note: For DACodecV6, this function is not implemented since there is no notion of DAChunk in this version. Blobs +// Note: For DACodecV7, this function is not implemented since there is no notion of DAChunk in this version. Blobs // contain the entire batch data, and it is up to a prover to decide the chunk sizes. -func (d *DACodecV6) NewDAChunk(_ *Chunk, _ uint64) (DAChunk, error) { +func (d *DACodecV7) NewDAChunk(_ *Chunk, _ uint64) (DAChunk, error) { return nil, nil } // NewDABatch creates a DABatch including blob from the provided Batch. 
-func (d *DACodecV6) NewDABatch(batch *Batch) (DABatch, error) { +func (d *DACodecV7) NewDABatch(batch *Batch) (DABatch, error) { if len(batch.Chunks) != 0 { return nil, errors.New("batch must not contain any chunks") } @@ -80,7 +80,7 @@ func (d *DACodecV6) NewDABatch(batch *Batch) (DABatch, error) { return nil, fmt.Errorf("failed to construct blob: %w", err) } - daBatch, err := newDABatchV6(CodecV6, batch.Index, batch.ParentBatchHash, blobVersionedHash, blob, blobBytes) + daBatch, err := newDABatchV7(CodecV7, batch.Index, batch.ParentBatchHash, blobVersionedHash, blob, blobBytes) if err != nil { return nil, fmt.Errorf("failed to construct DABatch: %w", err) } @@ -88,14 +88,14 @@ func (d *DACodecV6) NewDABatch(batch *Batch) (DABatch, error) { return daBatch, nil } -func (d *DACodecV6) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []byte, error) { +func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []byte, error) { enableCompression, err := d.CheckBatchCompressedDataCompatibility(batch) if err != nil { return nil, common.Hash{}, nil, fmt.Errorf("failed to check batch compressed data compatibility: %w", err) } blobBytes := make([]byte, blobEnvelopeV7PayloadOffset) - blobBytes[blobEnvelopeV7VersionOffset] = uint8(CodecV6) + blobBytes[blobEnvelopeV7VersionOffset] = uint8(CodecV7) payloadBytes, err := d.constructBlobPayload(batch) if err != nil { @@ -144,8 +144,8 @@ func (d *DACodecV6) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b return blob, blobVersionedHash, blobBytes, nil } -func (d *DACodecV6) constructBlobPayload(batch *Batch) ([]byte, error) { - blobPayload := blobPayloadV6{ +func (d *DACodecV7) constructBlobPayload(batch *Batch) ([]byte, error) { + blobPayload := blobPayloadV7{ initialL1MessageIndex: batch.InitialL1MessageIndex, initialL1MessageQueueHash: batch.InitialL1MessageQueueHash, lastL1MessageQueueHash: batch.LastL1MessageQueueHash, @@ -157,30 +157,30 @@ func (d *DACodecV6) constructBlobPayload(batch 
*Batch) ([]byte, error) { // NewDABatchFromBytes decodes the given byte slice into a DABatch. // Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. -func (d *DACodecV6) NewDABatchFromBytes(data []byte) (DABatch, error) { - daBatch, err := decodeDABatchV6(data) +func (d *DACodecV7) NewDABatchFromBytes(data []byte) (DABatch, error) { + daBatch, err := decodeDABatchV7(data) if err != nil { return nil, fmt.Errorf("failed to decode DA batch: %w", err) } - if daBatch.version != CodecV6 { - return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV6, daBatch.version) + if daBatch.version != CodecV7 { + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV7, daBatch.version) } return daBatch, nil } -func (d *DACodecV6) DecodeDAChunksRawTx(_ [][]byte) ([]*DAChunkRawTx, error) { +func (d *DACodecV7) DecodeDAChunksRawTx(_ [][]byte) ([]*DAChunkRawTx, error) { return nil, nil } -func (d *DACodecV6) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { +func (d *DACodecV7) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { rawBytes := bytesFromBlobCanonical(blob) // read the blob envelope header version := rawBytes[blobEnvelopeV7VersionOffset] - if CodecVersion(version) != CodecV6 { - return fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV6, version) + if CodecVersion(version) != CodecV7 { + return fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV7, version) } // read the data size @@ -195,13 +195,13 @@ func (d *DACodecV6) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx compressed := rawBytes[blobEnvelopeV7CompressedFlagOffset] if compressed == 0x1 { var err error - if payloadBytes, err = decompressV6Bytes(payloadBytes); err != nil { + if payloadBytes, err = decompressV7Bytes(payloadBytes); err != nil { return fmt.Errorf("failed to decompress blob payload: %w", err) } } 
// read the payload - payload, err := decodeBlobPayloadV6(payloadBytes) + payload, err := decodeBlobPayloadV7(payloadBytes) if err != nil { return fmt.Errorf("failed to decode blob payload: %w", err) } @@ -216,7 +216,7 @@ func (d *DACodecV6) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx // checkCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a blob payload, compresses the data, and checks the compressed data compatibility. -func (d *DACodecV6) checkCompressedDataCompatibility(batch *Batch) (bool, error) { +func (d *DACodecV7) checkCompressedDataCompatibility(batch *Batch) (bool, error) { payloadBytes, err := d.constructBlobPayload(batch) if err != nil { return false, fmt.Errorf("failed to construct blob payload: %w", err) @@ -242,56 +242,56 @@ func (d *DACodecV6) checkCompressedDataCompatibility(batch *Batch) (bool, error) } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. -// Note: For DACodecV6, this function is not implemented since there is no notion of DAChunk in this version. Blobs +// Note: For DACodecV7, this function is not implemented since there is no notion of DAChunk in this version. Blobs // contain the entire batch data, and it is up to a prover to decide the chunk sizes. -func (d *DACodecV6) CheckChunkCompressedDataCompatibility(_ *Chunk) (bool, error) { +func (d *DACodecV7) CheckChunkCompressedDataCompatibility(_ *Chunk) (bool, error) { return true, nil } // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. -func (d *DACodecV6) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { +func (d *DACodecV7) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { return d.checkCompressedDataCompatibility(b) } // TODO: which of the Estimate* functions are needed? 
-func (d *DACodecV6) EstimateChunkL1CommitBatchSizeAndBlobSize(chunk *Chunk) (uint64, uint64, error) { +func (d *DACodecV7) EstimateChunkL1CommitBatchSizeAndBlobSize(chunk *Chunk) (uint64, uint64, error) { //TODO implement me after contracts are implemented panic("implement me") } -func (d *DACodecV6) EstimateBatchL1CommitBatchSizeAndBlobSize(batch *Batch) (uint64, uint64, error) { +func (d *DACodecV7) EstimateBatchL1CommitBatchSizeAndBlobSize(batch *Batch) (uint64, uint64, error) { //TODO implement me after contracts are implemented panic("implement me") } -func (d *DACodecV6) EstimateBlockL1CommitCalldataSize(block *Block) (uint64, error) { +func (d *DACodecV7) EstimateBlockL1CommitCalldataSize(block *Block) (uint64, error) { //TODO implement me after contracts are implemented panic("implement me") } -func (d *DACodecV6) EstimateChunkL1CommitCalldataSize(chunk *Chunk) (uint64, error) { +func (d *DACodecV7) EstimateChunkL1CommitCalldataSize(chunk *Chunk) (uint64, error) { //TODO implement me after contracts are implemented panic("implement me") } -func (d *DACodecV6) EstimateChunkL1CommitGas(chunk *Chunk) (uint64, error) { +func (d *DACodecV7) EstimateChunkL1CommitGas(chunk *Chunk) (uint64, error) { //TODO implement me after contracts are implemented panic("implement me") } -func (d *DACodecV6) EstimateBatchL1CommitGas(batch *Batch) (uint64, error) { +func (d *DACodecV7) EstimateBatchL1CommitGas(batch *Batch) (uint64, error) { //TODO implement me after contracts are implemented panic("implement me") } -func (d *DACodecV6) EstimateBatchL1CommitCalldataSize(batch *Batch) (uint64, error) { +func (d *DACodecV7) EstimateBatchL1CommitCalldataSize(batch *Batch) (uint64, error) { //TODO implement me after contracts are implemented panic("implement me") } // JSONFromBytes converts the bytes to a DABatch and then marshals it to JSON. 
-func (d *DACodecV6) JSONFromBytes(data []byte) ([]byte, error) { +func (d *DACodecV7) JSONFromBytes(data []byte) ([]byte, error) { batch, err := d.NewDABatchFromBytes(data) if err != nil { return nil, fmt.Errorf("failed to decode DABatch from bytes: %w", err) diff --git a/encoding/codecv6_types.go b/encoding/codecv7_types.go similarity index 53% rename from encoding/codecv6_types.go rename to encoding/codecv7_types.go index 30e9505..1cebfd3 100644 --- a/encoding/codecv6_types.go +++ b/encoding/codecv7_types.go @@ -14,6 +14,12 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) +const ( + daBatchV7EncodedLength = 73 + daBatchV7OffsetBlobVersionedHash = 9 + daBatchV7OffsetParentBatchHash = 41 +) + const ( blobEnvelopeV7VersionOffset = 0 blobEnvelopeV7ByteSizeOffset = 1 @@ -22,26 +28,26 @@ const ( ) const ( - blobPayloadV6EncodedLength = 8 + 2*common.HashLength + 8 + 2 - blobPayloadV6OffsetInitialL1MessageIndex = 0 - blobPayloadV6OffsetInitialL1MessageQueue = blobPayloadV6OffsetInitialL1MessageIndex + 8 - blobPayloadV6OffsetLastL1MessageQueue = blobPayloadV6OffsetInitialL1MessageQueue + common.HashLength - blobPayloadV6OffsetInitialL2BlockNumber = blobPayloadV6OffsetLastL1MessageQueue + common.HashLength - blobPayloadV6OffsetNumBlocks = blobPayloadV6OffsetInitialL2BlockNumber + 8 - blobPayloadV6OffsetBlocks = blobPayloadV6OffsetNumBlocks + 2 + blobPayloadV7EncodedLength = 8 + 2*common.HashLength + 8 + 2 + blobPayloadV7OffsetInitialL1MessageIndex = 0 + blobPayloadV7OffsetInitialL1MessageQueue = blobPayloadV7OffsetInitialL1MessageIndex + 8 + blobPayloadV7OffsetLastL1MessageQueue = blobPayloadV7OffsetInitialL1MessageQueue + common.HashLength + blobPayloadV7OffsetInitialL2BlockNumber = blobPayloadV7OffsetLastL1MessageQueue + common.HashLength + blobPayloadV7OffsetNumBlocks = blobPayloadV7OffsetInitialL2BlockNumber + 8 + blobPayloadV7OffsetBlocks = blobPayloadV7OffsetNumBlocks + 2 ) const ( - daBlockV6BlockContextByteSize = 52 - daBlockV6OffsetTimestamp = 0 
- daBlockV6OffsetBaseFee = daBlockV6OffsetTimestamp + 8 - daBlockV6OffsetGasLimit = daBlockV6OffsetBaseFee + 32 - daBlockV6numTransactionsOffset = daBlockV6OffsetGasLimit + 8 - daBlockV6numL1MessagesOffset = daBlockV6numTransactionsOffset + 2 + daBlockV7BlockContextByteSize = 52 + daBlockV7OffsetTimestamp = 0 + daBlockV7OffsetBaseFee = daBlockV7OffsetTimestamp + 8 + daBlockV7OffsetGasLimit = daBlockV7OffsetBaseFee + 32 + daBlockV7numTransactionsOffset = daBlockV7OffsetGasLimit + 8 + daBlockV7numL1MessagesOffset = daBlockV7numTransactionsOffset + 2 ) // daBatchV3 contains metadata about a batch of DAChunks. -type daBatchV6 struct { +type daBatchV7 struct { version CodecVersion batchIndex uint64 parentBatchHash common.Hash @@ -51,9 +57,9 @@ type daBatchV6 struct { blobBytes []byte } -// newDABatchV6 is a constructor for daBatchV6 that calls blobDataProofForPICircuit internally. -func newDABatchV6(version CodecVersion, batchIndex uint64, parentBatchHash, blobVersionedHash common.Hash, blob *kzg4844.Blob, blobBytes []byte) (*daBatchV6, error) { - daBatch := &daBatchV6{ +// newDABatchV7 is a constructor for daBatchV7 that calls blobDataProofForPICircuit internally. 
+func newDABatchV7(version CodecVersion, batchIndex uint64, parentBatchHash, blobVersionedHash common.Hash, blob *kzg4844.Blob, blobBytes []byte) (*daBatchV7, error) { + daBatch := &daBatchV7{ version: version, batchIndex: batchIndex, parentBatchHash: parentBatchHash, @@ -65,61 +71,61 @@ func newDABatchV6(version CodecVersion, batchIndex uint64, parentBatchHash, blob return daBatch, nil } -func decodeDABatchV6(data []byte) (*daBatchV6, error) { - if len(data) != daBatchV6EncodedLength { - return nil, fmt.Errorf("invalid data length for DABatchV6, expected %d bytes but got %d", daBatchV6EncodedLength, len(data)) +func decodeDABatchV7(data []byte) (*daBatchV7, error) { + if len(data) != daBatchV7EncodedLength { + return nil, fmt.Errorf("invalid data length for DABatchV7, expected %d bytes but got %d", daBatchV7EncodedLength, len(data)) } version := CodecVersion(data[daBatchOffsetVersion]) - batchIndex := binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV6OffsetBlobVersionedHash]) - blobVersionedHash := common.BytesToHash(data[daBatchV6OffsetBlobVersionedHash:daBatchV6OffsetParentBatchHash]) - parentBatchHash := common.BytesToHash(data[daBatchV6OffsetParentBatchHash:daBatchV6EncodedLength]) + batchIndex := binary.BigEndian.Uint64(data[daBatchOffsetBatchIndex:daBatchV7OffsetBlobVersionedHash]) + blobVersionedHash := common.BytesToHash(data[daBatchV7OffsetBlobVersionedHash:daBatchV7OffsetParentBatchHash]) + parentBatchHash := common.BytesToHash(data[daBatchV7OffsetParentBatchHash:daBatchV7EncodedLength]) - return newDABatchV6(version, batchIndex, parentBatchHash, blobVersionedHash, nil, nil) + return newDABatchV7(version, batchIndex, parentBatchHash, blobVersionedHash, nil, nil) } // Encode serializes the DABatchV3 into bytes. 
-func (b *daBatchV6) Encode() []byte { - batchBytes := make([]byte, daBatchV6EncodedLength) +func (b *daBatchV7) Encode() []byte { + batchBytes := make([]byte, daBatchV7EncodedLength) batchBytes[daBatchOffsetVersion] = byte(b.version) - binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV6OffsetBlobVersionedHash], b.batchIndex) - copy(batchBytes[daBatchV6OffsetBlobVersionedHash:daBatchV6OffsetParentBatchHash], b.blobVersionedHash[:]) - copy(batchBytes[daBatchV6OffsetParentBatchHash:daBatchV6EncodedLength], b.parentBatchHash[:]) + binary.BigEndian.PutUint64(batchBytes[daBatchOffsetBatchIndex:daBatchV7OffsetBlobVersionedHash], b.batchIndex) + copy(batchBytes[daBatchV7OffsetBlobVersionedHash:daBatchV7OffsetParentBatchHash], b.blobVersionedHash[:]) + copy(batchBytes[daBatchV7OffsetParentBatchHash:daBatchV7EncodedLength], b.parentBatchHash[:]) return batchBytes } // Hash computes the hash of the serialized DABatch. -func (b *daBatchV6) Hash() common.Hash { +func (b *daBatchV7) Hash() common.Hash { return crypto.Keccak256Hash(b.Encode()) } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -// Note: This method is not implemented for daBatchV6. -func (b *daBatchV6) BlobDataProofForPointEvaluation() ([]byte, error) { +// Note: This method is not implemented for daBatchV7. +func (b *daBatchV7) BlobDataProofForPointEvaluation() ([]byte, error) { return nil, nil } // Blob returns the blob of the batch. -func (b *daBatchV6) Blob() *kzg4844.Blob { +func (b *daBatchV7) Blob() *kzg4844.Blob { return b.blob } // BlobBytes returns the blob bytes of the batch. -func (b *daBatchV6) BlobBytes() []byte { +func (b *daBatchV7) BlobBytes() []byte { return b.blobBytes } // MarshalJSON implements the custom JSON serialization for daBatchV3. // This method is designed to provide prover with batch info in snake_case format. 
-func (b *daBatchV6) MarshalJSON() ([]byte, error) { - type daBatchV6JSON struct { +func (b *daBatchV7) MarshalJSON() ([]byte, error) { + type daBatchV7JSON struct { Version CodecVersion `json:"version"` BatchIndex uint64 `json:"batch_index"` BlobVersionedHash string `json:"blob_versioned_hash"` ParentBatchHash string `json:"parent_batch_hash"` } - return json.Marshal(&daBatchV6JSON{ + return json.Marshal(&daBatchV7JSON{ Version: b.version, BatchIndex: b.batchIndex, BlobVersionedHash: b.blobVersionedHash.Hex(), @@ -128,23 +134,23 @@ func (b *daBatchV6) MarshalJSON() ([]byte, error) { } // Version returns the version of the DABatch. -func (b *daBatchV6) Version() CodecVersion { +func (b *daBatchV7) Version() CodecVersion { return b.version } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. -// For daBatchV6, there is no skipped L1 message bitmap. -func (b *daBatchV6) SkippedL1MessageBitmap() []byte { +// For daBatchV7, there is no skipped L1 message bitmap. +func (b *daBatchV7) SkippedL1MessageBitmap() []byte { return nil } // DataHash returns the data hash of the DABatch. -// For daBatchV6, there is no data hash. -func (b *daBatchV6) DataHash() common.Hash { +// For daBatchV7, there is no data hash. 
+func (b *daBatchV7) DataHash() common.Hash { return common.Hash{} } -type blobPayloadV6 struct { +type blobPayloadV7 struct { initialL1MessageIndex uint64 initialL1MessageQueueHash common.Hash lastL1MessageQueueHash common.Hash @@ -157,20 +163,20 @@ type blobPayloadV6 struct { transactions []types.Transactions } -func (b *blobPayloadV6) Encode() ([]byte, error) { - payloadBytes := make([]byte, blobPayloadV6EncodedLength) +func (b *blobPayloadV7) Encode() ([]byte, error) { + payloadBytes := make([]byte, blobPayloadV7EncodedLength) - binary.BigEndian.PutUint64(payloadBytes[blobPayloadV6OffsetInitialL1MessageIndex:blobPayloadV6OffsetInitialL1MessageQueue], b.initialL1MessageIndex) - copy(payloadBytes[blobPayloadV6OffsetInitialL1MessageQueue:blobPayloadV6OffsetLastL1MessageQueue], b.initialL1MessageQueueHash[:]) - copy(payloadBytes[blobPayloadV6OffsetLastL1MessageQueue:blobPayloadV6OffsetInitialL2BlockNumber], b.lastL1MessageQueueHash[:]) + binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetInitialL1MessageQueue], b.initialL1MessageIndex) + copy(payloadBytes[blobPayloadV7OffsetInitialL1MessageQueue:blobPayloadV7OffsetLastL1MessageQueue], b.initialL1MessageQueueHash[:]) + copy(payloadBytes[blobPayloadV7OffsetLastL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber], b.lastL1MessageQueueHash[:]) blockNumber := b.blocks[0].Header.Number.Uint64() - binary.BigEndian.PutUint64(payloadBytes[blobPayloadV6OffsetInitialL2BlockNumber:blobPayloadV6OffsetNumBlocks], blockNumber) - binary.BigEndian.PutUint16(payloadBytes[blobPayloadV6OffsetNumBlocks:blobPayloadV6OffsetBlocks], uint16(len(b.blocks))) + binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL2BlockNumber:blobPayloadV7OffsetNumBlocks], blockNumber) + binary.BigEndian.PutUint16(payloadBytes[blobPayloadV7OffsetNumBlocks:blobPayloadV7OffsetBlocks], uint16(len(b.blocks))) var transactionBytes []byte for _, block := range b.blocks { - daBlock := 
newDABlockV6(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), block.NumL1MessagesNoSkipping()) + daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), block.NumL1MessagesNoSkipping()) payloadBytes = append(payloadBytes, daBlock.Encode()...) // encode L2 txs as RLP and append to transactionBytes @@ -191,25 +197,25 @@ func (b *blobPayloadV6) Encode() ([]byte, error) { return payloadBytes, nil } -func decodeBlobPayloadV6(data []byte) (*blobPayloadV6, error) { - if len(data) < blobPayloadV6EncodedLength { - return nil, fmt.Errorf("invalid data length for blobPayloadV6, expected at least %d bytes but got %d", blobPayloadV6EncodedLength, len(data)) +func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { + if len(data) < blobPayloadV7EncodedLength { + return nil, fmt.Errorf("invalid data length for blobPayloadV7, expected at least %d bytes but got %d", blobPayloadV7EncodedLength, len(data)) } - initialL1MessageIndex := binary.BigEndian.Uint64(data[blobPayloadV6OffsetInitialL1MessageIndex:blobPayloadV6OffsetInitialL1MessageQueue]) - initialL1MessageQueueHash := common.BytesToHash(data[blobPayloadV6OffsetInitialL1MessageQueue:blobPayloadV6OffsetLastL1MessageQueue]) - lastL1MessageQueueHash := common.BytesToHash(data[blobPayloadV6OffsetLastL1MessageQueue:blobPayloadV6OffsetInitialL2BlockNumber]) + initialL1MessageIndex := binary.BigEndian.Uint64(data[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetInitialL1MessageQueue]) + initialL1MessageQueueHash := common.BytesToHash(data[blobPayloadV7OffsetInitialL1MessageQueue:blobPayloadV7OffsetLastL1MessageQueue]) + lastL1MessageQueueHash := common.BytesToHash(data[blobPayloadV7OffsetLastL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber]) - initialL2BlockNumber := 
binary.BigEndian.Uint64(data[blobPayloadV6OffsetInitialL2BlockNumber:blobPayloadV6OffsetNumBlocks]) - numBlocks := int(binary.BigEndian.Uint16(data[blobPayloadV6OffsetNumBlocks:blobPayloadV6OffsetBlocks])) + initialL2BlockNumber := binary.BigEndian.Uint64(data[blobPayloadV7OffsetInitialL2BlockNumber:blobPayloadV7OffsetNumBlocks]) + numBlocks := int(binary.BigEndian.Uint16(data[blobPayloadV7OffsetNumBlocks:blobPayloadV7OffsetBlocks])) // decode DA Blocks from the blob daBlocks := make([]DABlock, numBlocks) for i := uint64(0); i < uint64(numBlocks); i++ { - daBlock := newDABlockV6WithNumber(initialL2BlockNumber + i) + daBlock := newDABlockV7WithNumber(initialL2BlockNumber + i) - startBytes := blobPayloadV6OffsetBlocks + i*daBlockV6BlockContextByteSize - endBytes := startBytes + daBlockV6BlockContextByteSize + startBytes := blobPayloadV7OffsetBlocks + i*daBlockV7BlockContextByteSize + endBytes := startBytes + daBlockV7BlockContextByteSize if err := daBlock.Decode(data[startBytes:endBytes]); err != nil { return nil, fmt.Errorf("failed to decode DA block: %w", err) } @@ -218,7 +224,7 @@ func decodeBlobPayloadV6(data []byte) (*blobPayloadV6, error) { } // decode transactions for each block from the blob - txBytes := data[blobPayloadV6OffsetBlocks+daBlockV6BlockContextByteSize*numBlocks:] + txBytes := data[blobPayloadV7OffsetBlocks+daBlockV7BlockContextByteSize*numBlocks:] curIndex := 0 var transactions []types.Transactions @@ -241,7 +247,7 @@ func decodeBlobPayloadV6(data []byte) (*blobPayloadV6, error) { transactions = append(transactions, blockTransactions) } - return &blobPayloadV6{ + return &blobPayloadV7{ initialL1MessageIndex: initialL1MessageIndex, initialL1MessageQueueHash: initialL1MessageQueueHash, lastL1MessageQueueHash: lastL1MessageQueueHash, @@ -250,13 +256,13 @@ func decodeBlobPayloadV6(data []byte) (*blobPayloadV6, error) { }, nil } -type daBlockV6 struct { +type daBlockV7 struct { daBlockV0 } -// newDABlockV6 is a constructor function for daBlockV6 that 
initializes the internal fields. -func newDABlockV6(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *daBlockV6 { - return &daBlockV6{ +// newDABlockV7 is a constructor function for daBlockV7 that initializes the internal fields. +func newDABlockV7(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *daBlockV7 { + return &daBlockV7{ daBlockV0: daBlockV0{ number: number, timestamp: timestamp, @@ -268,8 +274,8 @@ func newDABlockV6(number uint64, timestamp uint64, baseFee *big.Int, gasLimit ui } } -func newDABlockV6WithNumber(number uint64) *daBlockV6 { - return &daBlockV6{ +func newDABlockV7WithNumber(number uint64) *daBlockV7 { + return &daBlockV7{ daBlockV0: daBlockV0{ number: number, }, @@ -277,35 +283,35 @@ func newDABlockV6WithNumber(number uint64) *daBlockV6 { } // Encode serializes the DABlock into a slice of bytes. -func (b *daBlockV6) Encode() []byte { - daBlockBytes := make([]byte, daBlockV6BlockContextByteSize) - binary.BigEndian.PutUint64(daBlockBytes[daBlockV6OffsetTimestamp:daBlockV6OffsetBaseFee], b.timestamp) +func (b *daBlockV7) Encode() []byte { + daBlockBytes := make([]byte, daBlockV7BlockContextByteSize) + binary.BigEndian.PutUint64(daBlockBytes[daBlockV7OffsetTimestamp:daBlockV7OffsetBaseFee], b.timestamp) if b.baseFee != nil { - b.baseFee.FillBytes(daBlockBytes[daBlockV6OffsetBaseFee:daBlockV6OffsetGasLimit]) + b.baseFee.FillBytes(daBlockBytes[daBlockV7OffsetBaseFee:daBlockV7OffsetGasLimit]) } - binary.BigEndian.PutUint64(daBlockBytes[daBlockV6OffsetGasLimit:daBlockV6numTransactionsOffset], b.gasLimit) - binary.BigEndian.PutUint16(daBlockBytes[daBlockV6numTransactionsOffset:daBlockV6numL1MessagesOffset], b.numTransactions) - binary.BigEndian.PutUint16(daBlockBytes[daBlockV6numL1MessagesOffset:], b.numL1Messages) + binary.BigEndian.PutUint64(daBlockBytes[daBlockV7OffsetGasLimit:daBlockV7numTransactionsOffset], b.gasLimit) 
+ binary.BigEndian.PutUint16(daBlockBytes[daBlockV7numTransactionsOffset:daBlockV7numL1MessagesOffset], b.numTransactions) + binary.BigEndian.PutUint16(daBlockBytes[daBlockV7numL1MessagesOffset:], b.numL1Messages) return daBlockBytes } // Decode populates the fields of a DABlock from a byte slice. -func (b *daBlockV6) Decode(data []byte) error { - if len(data) != daBlockV6BlockContextByteSize { - return fmt.Errorf("block encoding is not blockContextByteSize bytes long expected %d, got %d", daBlockV6BlockContextByteSize, len(data)) +func (b *daBlockV7) Decode(data []byte) error { + if len(data) != daBlockV7BlockContextByteSize { + return fmt.Errorf("block encoding is not blockContextByteSize bytes long expected %d, got %d", daBlockV7BlockContextByteSize, len(data)) } - b.timestamp = binary.BigEndian.Uint64(data[daBlockV6OffsetTimestamp:daBlockV6OffsetBaseFee]) - b.baseFee = new(big.Int).SetBytes(data[daBlockV6OffsetBaseFee:daBlockV6OffsetGasLimit]) - b.gasLimit = binary.BigEndian.Uint64(data[daBlockV6OffsetGasLimit:daBlockV6numTransactionsOffset]) - b.numTransactions = binary.BigEndian.Uint16(data[daBlockV6numTransactionsOffset:daBlockV6numL1MessagesOffset]) - b.numL1Messages = binary.BigEndian.Uint16(data[daBlockV6numL1MessagesOffset:]) + b.timestamp = binary.BigEndian.Uint64(data[daBlockV7OffsetTimestamp:daBlockV7OffsetBaseFee]) + b.baseFee = new(big.Int).SetBytes(data[daBlockV7OffsetBaseFee:daBlockV7OffsetGasLimit]) + b.gasLimit = binary.BigEndian.Uint64(data[daBlockV7OffsetGasLimit:daBlockV7numTransactionsOffset]) + b.numTransactions = binary.BigEndian.Uint16(data[daBlockV7numTransactionsOffset:daBlockV7numL1MessagesOffset]) + b.numL1Messages = binary.BigEndian.Uint16(data[daBlockV7numL1MessagesOffset:]) return nil } -// decompressV6Bytes decompresses the given blob bytes into the original payload bytes. -func decompressV6Bytes(compressedBytes []byte) ([]byte, error) { +// decompressV7Bytes decompresses the given blob bytes into the original payload bytes. 
+func decompressV7Bytes(compressedBytes []byte) ([]byte, error) { var res []byte r := bytes.NewReader(compressedBytes) diff --git a/encoding/da.go b/encoding/da.go index 0cd4eed..35e3774 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -77,12 +77,6 @@ const ( daBatchV3EncodedLength = 193 ) -const ( - daBatchV6EncodedLength = 73 - daBatchV6OffsetBlobVersionedHash = 9 - daBatchV6OffsetParentBatchHash = 41 -) - const ( payloadLengthBytes = 4 calldataNonZeroByteGas = 16 diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 3c9d415..e8d6e34 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -76,8 +76,9 @@ const ( CodecV2 CodecV3 CodecV4 - _ // CodecV5 is skipped - CodecV6 + _ + _ + CodecV7 ) // CodecFromVersion returns the appropriate codec for the given version. @@ -93,8 +94,8 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { return &DACodecV3{}, nil case CodecV4: return &DACodecV4{}, nil - case CodecV6: - return &DACodecV6{}, nil + case CodecV7: + return &DACodecV7{}, nil default: return nil, fmt.Errorf("unsupported codec version: %v", version) } @@ -103,7 +104,7 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { // CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. 
func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { if chainCfg.IsDarwinV2(startBlockTimestamp) { // TODO: replace with correct fork - return &DACodecV6{} + return &DACodecV7{} } else if chainCfg.IsDarwinV2(startBlockTimestamp) { return &DACodecV4{} } else if chainCfg.IsDarwin(startBlockTimestamp) { From 91171709155b33b546fd37eea65cbf928364d02b Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Wed, 22 Jan 2025 08:34:41 +0800 Subject: [PATCH 05/47] add NewDABatchFromParams --- encoding/codecv0.go | 4 ++++ encoding/codecv7.go | 6 +++++- encoding/codecv7_types.go | 8 ++++---- encoding/interfaces.go | 1 + 4 files changed, 14 insertions(+), 5 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index cbe4af3..34b8526 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -223,6 +223,10 @@ func (d *DACodecV0) NewDABatchFromBytes(data []byte) (DABatch, error) { ), nil } +func (d *DACodecV0) NewDABatchFromParams(_ uint64, _, _ common.Hash) (DABatch, error) { + return nil, nil +} + // EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. 
func (d *DACodecV0) EstimateBlockL1CommitCalldataSize(b *Block) (uint64, error) { var size uint64 diff --git a/encoding/codecv7.go b/encoding/codecv7.go index dddca68..497cf7e 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -80,7 +80,7 @@ func (d *DACodecV7) NewDABatch(batch *Batch) (DABatch, error) { return nil, fmt.Errorf("failed to construct blob: %w", err) } - daBatch, err := newDABatchV7(CodecV7, batch.Index, batch.ParentBatchHash, blobVersionedHash, blob, blobBytes) + daBatch, err := newDABatchV7(CodecV7, batch.Index, blobVersionedHash, batch.ParentBatchHash, blob, blobBytes) if err != nil { return nil, fmt.Errorf("failed to construct DABatch: %w", err) } @@ -170,6 +170,10 @@ func (d *DACodecV7) NewDABatchFromBytes(data []byte) (DABatch, error) { return daBatch, nil } +func (d *DACodecV7) NewDABatchFromParams(batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash) (DABatch, error) { + return newDABatchV7(CodecV7, batchIndex, blobVersionedHash, parentBatchHash, nil, nil) +} + func (d *DACodecV7) DecodeDAChunksRawTx(_ [][]byte) ([]*DAChunkRawTx, error) { return nil, nil } diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 1cebfd3..3f862fd 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -50,20 +50,20 @@ const ( type daBatchV7 struct { version CodecVersion batchIndex uint64 - parentBatchHash common.Hash blobVersionedHash common.Hash + parentBatchHash common.Hash blob *kzg4844.Blob blobBytes []byte } // newDABatchV7 is a constructor for daBatchV7 that calls blobDataProofForPICircuit internally. 
-func newDABatchV7(version CodecVersion, batchIndex uint64, parentBatchHash, blobVersionedHash common.Hash, blob *kzg4844.Blob, blobBytes []byte) (*daBatchV7, error) { +func newDABatchV7(version CodecVersion, batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash, blob *kzg4844.Blob, blobBytes []byte) (*daBatchV7, error) { daBatch := &daBatchV7{ version: version, batchIndex: batchIndex, - parentBatchHash: parentBatchHash, blobVersionedHash: blobVersionedHash, + parentBatchHash: parentBatchHash, blob: blob, blobBytes: blobBytes, } @@ -81,7 +81,7 @@ func decodeDABatchV7(data []byte) (*daBatchV7, error) { blobVersionedHash := common.BytesToHash(data[daBatchV7OffsetBlobVersionedHash:daBatchV7OffsetParentBatchHash]) parentBatchHash := common.BytesToHash(data[daBatchV7OffsetParentBatchHash:daBatchV7EncodedLength]) - return newDABatchV7(version, batchIndex, parentBatchHash, blobVersionedHash, nil, nil) + return newDABatchV7(version, batchIndex, blobVersionedHash, parentBatchHash, nil, nil) } // Encode serializes the DABatchV3 into bytes. 
diff --git a/encoding/interfaces.go b/encoding/interfaces.go index e8d6e34..8d04ca4 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -49,6 +49,7 @@ type Codec interface { NewDAChunk(*Chunk, uint64) (DAChunk, error) NewDABatch(*Batch) (DABatch, error) NewDABatchFromBytes([]byte) (DABatch, error) + NewDABatchFromParams(batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash) (DABatch, error) DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error From 4ef7bfc6b6349598730e4c123820030f5c3f1043 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Wed, 22 Jan 2025 12:18:00 +0800 Subject: [PATCH 06/47] add DecodeBlob to Codec --- encoding/codecv0.go | 4 ++++ encoding/codecv7.go | 23 ++++++++++++++++------- encoding/codecv7_types.go | 12 ++++++++++++ encoding/interfaces.go | 8 ++++++++ 4 files changed, 40 insertions(+), 7 deletions(-) diff --git a/encoding/codecv0.go b/encoding/codecv0.go index 34b8526..0e8b70f 100644 --- a/encoding/codecv0.go +++ b/encoding/codecv0.go @@ -161,6 +161,10 @@ func (d *DACodecV0) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx return nil } +func (d *DACodecV0) DecodeBlob(blob *kzg4844.Blob) (DABlobPayload, error) { + return nil, nil +} + // NewDABatch creates a DABatch from the provided Batch. 
func (d *DACodecV0) NewDABatch(batch *Batch) (DABatch, error) { // this encoding can only support a fixed number of chunks per batch diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 497cf7e..32be8c9 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -178,19 +178,19 @@ func (d *DACodecV7) DecodeDAChunksRawTx(_ [][]byte) ([]*DAChunkRawTx, error) { return nil, nil } -func (d *DACodecV7) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { +func (d *DACodecV7) DecodeBlob(blob *kzg4844.Blob) (DABlobPayload, error) { rawBytes := bytesFromBlobCanonical(blob) // read the blob envelope header version := rawBytes[blobEnvelopeV7VersionOffset] if CodecVersion(version) != CodecV7 { - return fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV7, version) + return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV7, version) } // read the data size blobEnvelopeSize := decodeSize3Bytes(rawBytes[blobEnvelopeV7ByteSizeOffset:blobEnvelopeV7CompressedFlagOffset]) if blobEnvelopeSize+blobEnvelopeV7PayloadOffset > uint32(len(rawBytes)) { - return fmt.Errorf("blob envelope size exceeds the raw data size: %d > %d", blobEnvelopeSize, len(rawBytes)) + return nil, fmt.Errorf("blob envelope size exceeds the raw data size: %d > %d", blobEnvelopeSize, len(rawBytes)) } payloadBytes := rawBytes[blobEnvelopeV7PayloadOffset : blobEnvelopeV7PayloadOffset+blobEnvelopeSize] @@ -200,19 +200,28 @@ func (d *DACodecV7) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx if compressed == 0x1 { var err error if payloadBytes, err = decompressV7Bytes(payloadBytes); err != nil { - return fmt.Errorf("failed to decompress blob payload: %w", err) + return nil, fmt.Errorf("failed to decompress blob payload: %w", err) } } // read the payload payload, err := decodeBlobPayloadV7(payloadBytes) if err != nil { - return fmt.Errorf("failed to decode blob payload: %w", err) + return nil, fmt.Errorf("failed to decode blob payload: 
%w", err) + } + + return payload, nil +} + +func (d *DACodecV7) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { + payload, err := d.DecodeBlob(blob) + if err != nil { + return fmt.Errorf("failed to decode blob: %w", err) } chunks = append(chunks, &DAChunkRawTx{ - Blocks: payload.daBlocks, - Transactions: payload.transactions, + Blocks: payload.Blocks(), + Transactions: payload.Transactions(), }) return nil diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 3f862fd..b3854d6 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -163,6 +163,18 @@ type blobPayloadV7 struct { transactions []types.Transactions } +func (b *blobPayloadV7) Blocks() []DABlock { + return b.daBlocks +} + +func (b *blobPayloadV7) Transactions() []types.Transactions { + return b.transactions +} + +func (b *blobPayloadV7) InitialL1MessageIndex() uint64 { + return b.initialL1MessageIndex +} + func (b *blobPayloadV7) Encode() ([]byte, error) { payloadBytes := make([]byte, blobPayloadV7EncodedLength) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 8d04ca4..2ab850f 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -5,6 +5,7 @@ import ( "math/big" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/params" ) @@ -40,6 +41,12 @@ type DABatch interface { SkippedL1MessageBitmap() []byte } +type DABlobPayload interface { + Blocks() []DABlock + Transactions() []types.Transactions + InitialL1MessageIndex() uint64 +} + // Codec represents the interface for encoding and decoding DA-related structures. 
type Codec interface { Version() CodecVersion @@ -53,6 +60,7 @@ type Codec interface { DecodeDAChunksRawTx(chunkBytes [][]byte) ([]*DAChunkRawTx, error) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error + DecodeBlob(blob *kzg4844.Blob) (DABlobPayload, error) CheckChunkCompressedDataCompatibility(*Chunk) (bool, error) CheckBatchCompressedDataCompatibility(*Batch) (bool, error) From bf16156415d1cdf5f194714c6d15ce3615e0ea2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=96mer=20Faruk=20Irmak?= Date: Mon, 27 Jan 2025 13:47:32 +0300 Subject: [PATCH 07/47] Update da.go --- encoding/da.go | 1 + 1 file changed, 1 insertion(+) diff --git a/encoding/da.go b/encoding/da.go index 90bc979..dfca92d 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -664,6 +664,7 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin } else if !config.IsEuclid(blockTimestamp) { return CodecV4 } else { + // V5 is skipped, because it is only used for the special Euclid transition batch that we handle explicitly return CodecV6 } } From 2817674da5a191005a352117db52fe08a74cd72c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=96mer=20Faruk=20Irmak?= Date: Mon, 27 Jan 2025 13:48:01 +0300 Subject: [PATCH 08/47] Update interfaces.go --- encoding/interfaces.go | 1 + 1 file changed, 1 insertion(+) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index e845102..ac0d1ed 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -105,6 +105,7 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { // CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. 
func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { if chainCfg.IsEuclid(startBlockTimestamp) { + // V5 is skipped, because it is only used for the special Euclid transition batch that we handle explicitly return NewDACodecV6() } else if chainCfg.IsDarwinV2(startBlockTimestamp) { return &DACodecV4{} From 64133efc3843f06522639a54694376f56fe70216 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 28 Jan 2025 09:53:24 +0800 Subject: [PATCH 09/47] fixes after merge --- encoding/interfaces.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 06b0648..84d345d 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -118,6 +118,8 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { // TODO: replace with correct fork if chainCfg.IsEuclid(startBlockTimestamp) { + return &DACodecV7{} + } else if chainCfg.IsEuclid(startBlockTimestamp) { // V5 is skipped, because it is only used for the special Euclid transition batch that we handle explicitly return NewDACodecV6() } else if chainCfg.IsDarwinV2(startBlockTimestamp) { From 1dde89ae3ab03889645a294ecb35afc549ceef1f Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Wed, 29 Jan 2025 17:15:02 +0800 Subject: [PATCH 10/47] address review comments --- encoding/codecv7.go | 36 ++++++----- encoding/codecv7_types.go | 127 ++++++++++++++++++++++++++------------ encoding/da.go | 28 +++++++-- 3 files changed, 130 insertions(+), 61 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 32be8c9..3effbab 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -24,22 +24,26 @@ func (d *DACodecV7) Version() CodecVersion { // MaxNumChunksPerBatch returns the maximum number of chunks 
per batch. func (d *DACodecV7) MaxNumChunksPerBatch() int { - return 1 + return math.MaxInt } // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. -func (d *DACodecV7) NewDABlock(block *Block, _ uint64) (DABlock, error) { +func (d *DACodecV7) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { if !block.Header.Number.IsUint64() { return nil, errors.New("block number is not uint64") } - // note: numL1Messages includes skipped messages - numL1Messages := block.NumL1MessagesNoSkipping() + numL1Messages, highestQueueIndex, err := block.NumL1MessagesNoSkipping() + if err != nil { + return nil, fmt.Errorf("failed to calculate number of L1 messages: %w", err) + } if numL1Messages > math.MaxUint16 { return nil, errors.New("number of L1 messages exceeds max uint16") } + if totalL1MessagePoppedBefore+uint64(numL1Messages) != highestQueueIndex { + return nil, fmt.Errorf("failed to sanity check L1 messages count: totalL1MessagePoppedBefore + numL1Messages != highestQueueIndex: %d + %d != %d", totalL1MessagePoppedBefore, numL1Messages, highestQueueIndex) + } - // note: numTransactions includes skipped messages numL2Transactions := block.NumL2Transactions() numTransactions := uint64(numL1Messages) + numL2Transactions if numTransactions > math.MaxUint16 { @@ -60,7 +64,7 @@ func (d *DACodecV7) NewDABlock(block *Block, _ uint64) (DABlock, error) { // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. // Note: For DACodecV7, this function is not implemented since there is no notion of DAChunk in this version. Blobs -// contain the entire batch data, and it is up to a prover to decide the chunk sizes. +// contain the entire batch data without any information of Chunks within. 
func (d *DACodecV7) NewDAChunk(_ *Chunk, _ uint64) (DAChunk, error) { return nil, nil } @@ -94,8 +98,8 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b return nil, common.Hash{}, nil, fmt.Errorf("failed to check batch compressed data compatibility: %w", err) } - blobBytes := make([]byte, blobEnvelopeV7PayloadOffset) - blobBytes[blobEnvelopeV7VersionOffset] = uint8(CodecV7) + blobBytes := make([]byte, blobEnvelopeV7OffsetPayload) + blobBytes[blobEnvelopeV7OffsetVersion] = uint8(CodecV7) payloadBytes, err := d.constructBlobPayload(batch) if err != nil { @@ -113,14 +117,14 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b log.Error("ConstructBlob: compressed data compatibility check failed", "err", err, "payloadBytes", hex.EncodeToString(payloadBytes), "compressedPayloadBytes", hex.EncodeToString(compressedPayloadBytes)) return nil, common.Hash{}, nil, err } - blobBytes[blobEnvelopeV7CompressedFlagOffset] = 0x1 + blobBytes[blobEnvelopeV7OffsetCompressedFlag] = 0x1 payloadBytes = compressedPayloadBytes } else { - blobBytes[blobEnvelopeV7CompressedFlagOffset] = 0x0 + blobBytes[blobEnvelopeV7OffsetCompressedFlag] = 0x0 } sizeSlice := encodeSize3Bytes(uint32(len(payloadBytes))) - copy(blobBytes[blobEnvelopeV7ByteSizeOffset:blobEnvelopeV7CompressedFlagOffset], sizeSlice) + copy(blobBytes[blobEnvelopeV7OffsetByteSize:blobEnvelopeV7OffsetCompressedFlag], sizeSlice) blobBytes = append(blobBytes, payloadBytes...) 
if len(blobBytes) > maxEffectiveBlobBytes { @@ -182,21 +186,21 @@ func (d *DACodecV7) DecodeBlob(blob *kzg4844.Blob) (DABlobPayload, error) { rawBytes := bytesFromBlobCanonical(blob) // read the blob envelope header - version := rawBytes[blobEnvelopeV7VersionOffset] + version := rawBytes[blobEnvelopeV7OffsetVersion] if CodecVersion(version) != CodecV7 { return nil, fmt.Errorf("codec version mismatch: expected %d but found %d", CodecV7, version) } // read the data size - blobEnvelopeSize := decodeSize3Bytes(rawBytes[blobEnvelopeV7ByteSizeOffset:blobEnvelopeV7CompressedFlagOffset]) - if blobEnvelopeSize+blobEnvelopeV7PayloadOffset > uint32(len(rawBytes)) { + blobEnvelopeSize := decodeSize3Bytes(rawBytes[blobEnvelopeV7OffsetByteSize:blobEnvelopeV7OffsetCompressedFlag]) + if blobEnvelopeSize+blobEnvelopeV7OffsetPayload > uint32(len(rawBytes)) { return nil, fmt.Errorf("blob envelope size exceeds the raw data size: %d > %d", blobEnvelopeSize, len(rawBytes)) } - payloadBytes := rawBytes[blobEnvelopeV7PayloadOffset : blobEnvelopeV7PayloadOffset+blobEnvelopeSize] + payloadBytes := rawBytes[blobEnvelopeV7OffsetPayload : blobEnvelopeV7OffsetPayload+blobEnvelopeSize] // read the compressed flag and decompress if needed - compressed := rawBytes[blobEnvelopeV7CompressedFlagOffset] + compressed := rawBytes[blobEnvelopeV7OffsetCompressedFlag] if compressed == 0x1 { var err error if payloadBytes, err = decompressV7Bytes(payloadBytes); err != nil { diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index b3854d6..9449590 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -14,39 +14,77 @@ import ( "github.com/scroll-tech/go-ethereum/crypto/kzg4844" ) +// Below is the encoding for `BatchHeader` V7, total 73 bytes. 
+// * Field Bytes Type Index Comments +// * version 1 uint8 0 The batch version +// * batchIndex 8 uint64 1 The index of the batch +// * blobVersionedHash 32 bytes32 9 The versioned hash of the blob with this batch’s data +// * parentBatchHash 32 bytes32 41 The parent batch hash + const ( daBatchV7EncodedLength = 73 daBatchV7OffsetBlobVersionedHash = 9 daBatchV7OffsetParentBatchHash = 41 ) +// Below is the encoding format for BlobEnvelopeV7. +// * Field Bytes Type Index Comments +// * version 1 uint8 0 The version of the DA codec (batch/blob) +// * n_bytes[1] 1 uint8 1 Value denoting the number of bytes, n_bytes[1] +// * n_bytes[2] 1 uint8 2 Value denoting the number of bytes, n_bytes[2]*256 +// * n_bytes[3] 1 uint8 3 Value denoting the number of bytes, n_bytes[3]*256^2 +// * flag 1 bool 4 1-byte flag to denote zstd-encoded/raw bytes +// * payload N bytes 5 Possibly zstd-encoded payload bytes +// * padding (4096*31 - (N+5)) bytes N+5 Padding to align to 4096*31 bytes + const ( - blobEnvelopeV7VersionOffset = 0 - blobEnvelopeV7ByteSizeOffset = 1 - blobEnvelopeV7CompressedFlagOffset = 4 - blobEnvelopeV7PayloadOffset = 5 + blobEnvelopeV7OffsetVersion = 0 + blobEnvelopeV7OffsetByteSize = 1 + blobEnvelopeV7OffsetCompressedFlag = 4 + blobEnvelopeV7OffsetPayload = 5 ) +// Below is the encoding format for the batch metadata and blocks. 
+// * Field Bytes Type Index Comments +// * initialL1MessageIndex 8 uint64 0 Queue index of the first L1 message contained in this batch +// * initialL1MessageQueueHash 32 bytes32 8 Hash of the L1 message queue at the last message in the previous batch +// * lastL1MessageQueueHash 32 bytes32 40 Hash of the L1 message queue at the last message in this batch +// * initialL2BlockNumber 8 uint64 72 The initial L2 block number in this batch +// * numBlocks 2 uint16 80 The number of blocks in this batch +// * block[0] 52 BlockContextV2 82 The first block in this batch +// * block[i] 52 BlockContextV2 82+52*i The (i+1)th block in this batch +// * block[n-1] 52 BlockContextV2 82+52*(n-1) The last block in this batch +// * l2Transactions dynamic bytes 82+52*n L2 transactions for this batch + const ( - blobPayloadV7EncodedLength = 8 + 2*common.HashLength + 8 + 2 + blobPayloadV7MinEncodedLength = 8 + 2*common.HashLength + 8 + 2 blobPayloadV7OffsetInitialL1MessageIndex = 0 - blobPayloadV7OffsetInitialL1MessageQueue = blobPayloadV7OffsetInitialL1MessageIndex + 8 - blobPayloadV7OffsetLastL1MessageQueue = blobPayloadV7OffsetInitialL1MessageQueue + common.HashLength - blobPayloadV7OffsetInitialL2BlockNumber = blobPayloadV7OffsetLastL1MessageQueue + common.HashLength - blobPayloadV7OffsetNumBlocks = blobPayloadV7OffsetInitialL2BlockNumber + 8 - blobPayloadV7OffsetBlocks = blobPayloadV7OffsetNumBlocks + 2 + blobPayloadV7OffsetInitialL1MessageQueue = 8 + blobPayloadV7OffsetLastL1MessageQueue = 40 + blobPayloadV7OffsetInitialL2BlockNumber = 72 + blobPayloadV7OffsetNumBlocks = 80 + blobPayloadV7OffsetBlocks = 82 ) +// Below is the encoding for DABlockV7, total 52 bytes. +// Note: the block number is not part of the encoding; it is derived as initialL2BlockNumber + i. +// * Field Bytes Type Index Comments +// * timestamp 8 uint64 0 The timestamp of this block. +// * baseFee 32 uint256 8 The base fee of this block. +// * gasLimit 8 uint64 40 The gas limit of this block. 
+// * numTransactions 2 uint16 48 The number of transactions in this block, both L1 & L2 txs. +// * numL1Messages 2 uint16 50 The number of l1 messages in this block. + const ( - daBlockV7BlockContextByteSize = 52 - daBlockV7OffsetTimestamp = 0 - daBlockV7OffsetBaseFee = daBlockV7OffsetTimestamp + 8 - daBlockV7OffsetGasLimit = daBlockV7OffsetBaseFee + 32 - daBlockV7numTransactionsOffset = daBlockV7OffsetGasLimit + 8 - daBlockV7numL1MessagesOffset = daBlockV7numTransactionsOffset + 2 + daBlockV7BlockContextEncodedLength = 52 + daBlockV7OffsetTimestamp = 0 + daBlockV7OffsetBaseFee = 8 + daBlockV7OffsetGasLimit = 40 + daBlockV7OffsetNumTransactions = 48 + daBlockV7OffsetNumL1Messages = 50 ) -// daBatchV3 contains metadata about a batch of DAChunks. +// daBatchV7 contains V7 batch metadata and payload. type daBatchV7 struct { version CodecVersion batchIndex uint64 @@ -57,7 +95,6 @@ type daBatchV7 struct { blobBytes []byte } -// newDABatchV7 is a constructor for daBatchV7 that calls blobDataProofForPICircuit internally. func newDABatchV7(version CodecVersion, batchIndex uint64, blobVersionedHash, parentBatchHash common.Hash, blob *kzg4844.Blob, blobBytes []byte) (*daBatchV7, error) { daBatch := &daBatchV7{ version: version, @@ -84,7 +121,7 @@ func decodeDABatchV7(data []byte) (*daBatchV7, error) { return newDABatchV7(version, batchIndex, blobVersionedHash, parentBatchHash, nil, nil) } -// Encode serializes the DABatchV3 into bytes. +// Encode serializes the dABatchV7 into bytes. func (b *daBatchV7) Encode() []byte { batchBytes := make([]byte, daBatchV7EncodedLength) batchBytes[daBatchOffsetVersion] = byte(b.version) @@ -115,7 +152,7 @@ func (b *daBatchV7) BlobBytes() []byte { return b.blobBytes } -// MarshalJSON implements the custom JSON serialization for daBatchV3. +// MarshalJSON implements the custom JSON serialization for daBatchV7. // This method is designed to provide prover with batch info in snake_case format. 
func (b *daBatchV7) MarshalJSON() ([]byte, error) { type daBatchV7JSON struct { @@ -159,8 +196,8 @@ type blobPayloadV7 struct { blocks []*Block // used for decoding - daBlocks []DABlock - transactions []types.Transactions + daBlocks []DABlock + l2Transactions []types.Transactions } func (b *blobPayloadV7) Blocks() []DABlock { @@ -168,7 +205,7 @@ func (b *blobPayloadV7) Blocks() []DABlock { } func (b *blobPayloadV7) Transactions() []types.Transactions { - return b.transactions + return b.l2Transactions } func (b *blobPayloadV7) InitialL1MessageIndex() uint64 { @@ -176,7 +213,7 @@ func (b *blobPayloadV7) InitialL1MessageIndex() uint64 { } func (b *blobPayloadV7) Encode() ([]byte, error) { - payloadBytes := make([]byte, blobPayloadV7EncodedLength) + payloadBytes := make([]byte, blobPayloadV7MinEncodedLength) binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetInitialL1MessageQueue], b.initialL1MessageIndex) copy(payloadBytes[blobPayloadV7OffsetInitialL1MessageQueue:blobPayloadV7OffsetLastL1MessageQueue], b.initialL1MessageQueueHash[:]) @@ -188,7 +225,11 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { var transactionBytes []byte for _, block := range b.blocks { - daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), block.NumL1MessagesNoSkipping()) + numL1Messages, _, err := block.NumL1MessagesNoSkipping() + if err != nil { + return nil, fmt.Errorf("failed to get numL1Messages: %w", err) + } + daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), numL1Messages) payloadBytes = append(payloadBytes, daBlock.Encode()...) 
// encode L2 txs as RLP and append to transactionBytes @@ -210,8 +251,8 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { } func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { - if len(data) < blobPayloadV7EncodedLength { - return nil, fmt.Errorf("invalid data length for blobPayloadV7, expected at least %d bytes but got %d", blobPayloadV7EncodedLength, len(data)) + if len(data) < blobPayloadV7MinEncodedLength { + return nil, fmt.Errorf("invalid data length for blobPayloadV7, expected at least %d bytes but got %d", blobPayloadV7MinEncodedLength, len(data)) } initialL1MessageIndex := binary.BigEndian.Uint64(data[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetInitialL1MessageQueue]) @@ -221,13 +262,17 @@ func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { initialL2BlockNumber := binary.BigEndian.Uint64(data[blobPayloadV7OffsetInitialL2BlockNumber:blobPayloadV7OffsetNumBlocks]) numBlocks := int(binary.BigEndian.Uint16(data[blobPayloadV7OffsetNumBlocks:blobPayloadV7OffsetBlocks])) + if len(data) < blobPayloadV7OffsetBlocks+daBlockV7BlockContextEncodedLength*numBlocks { + return nil, fmt.Errorf("invalid data length for blobPayloadV7, expected at least %d bytes but got %d", blobPayloadV7OffsetBlocks+daBlockV7BlockContextEncodedLength*numBlocks, len(data)) + } + // decode DA Blocks from the blob daBlocks := make([]DABlock, numBlocks) for i := uint64(0); i < uint64(numBlocks); i++ { daBlock := newDABlockV7WithNumber(initialL2BlockNumber + i) - startBytes := blobPayloadV7OffsetBlocks + i*daBlockV7BlockContextByteSize - endBytes := startBytes + daBlockV7BlockContextByteSize + startBytes := blobPayloadV7OffsetBlocks + i*daBlockV7BlockContextEncodedLength + endBytes := startBytes + daBlockV7BlockContextEncodedLength if err := daBlock.Decode(data[startBytes:endBytes]); err != nil { return nil, fmt.Errorf("failed to decode DA block: %w", err) } @@ -235,8 +280,8 @@ func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { daBlocks = 
append(daBlocks, daBlock) } - // decode transactions for each block from the blob - txBytes := data[blobPayloadV7OffsetBlocks+daBlockV7BlockContextByteSize*numBlocks:] + // decode l2Transactions for each block from the blob + txBytes := data[blobPayloadV7OffsetBlocks+daBlockV7BlockContextEncodedLength*numBlocks:] curIndex := 0 var transactions []types.Transactions @@ -264,7 +309,7 @@ func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { initialL1MessageQueueHash: initialL1MessageQueueHash, lastL1MessageQueueHash: lastL1MessageQueueHash, daBlocks: daBlocks, - transactions: transactions, + l2Transactions: transactions, }, nil } @@ -296,28 +341,28 @@ func newDABlockV7WithNumber(number uint64) *daBlockV7 { // Encode serializes the DABlock into a slice of bytes. func (b *daBlockV7) Encode() []byte { - daBlockBytes := make([]byte, daBlockV7BlockContextByteSize) + daBlockBytes := make([]byte, daBlockV7BlockContextEncodedLength) binary.BigEndian.PutUint64(daBlockBytes[daBlockV7OffsetTimestamp:daBlockV7OffsetBaseFee], b.timestamp) if b.baseFee != nil { b.baseFee.FillBytes(daBlockBytes[daBlockV7OffsetBaseFee:daBlockV7OffsetGasLimit]) } - binary.BigEndian.PutUint64(daBlockBytes[daBlockV7OffsetGasLimit:daBlockV7numTransactionsOffset], b.gasLimit) - binary.BigEndian.PutUint16(daBlockBytes[daBlockV7numTransactionsOffset:daBlockV7numL1MessagesOffset], b.numTransactions) - binary.BigEndian.PutUint16(daBlockBytes[daBlockV7numL1MessagesOffset:], b.numL1Messages) + binary.BigEndian.PutUint64(daBlockBytes[daBlockV7OffsetGasLimit:daBlockV7OffsetNumTransactions], b.gasLimit) + binary.BigEndian.PutUint16(daBlockBytes[daBlockV7OffsetNumTransactions:daBlockV7OffsetNumL1Messages], b.numTransactions) + binary.BigEndian.PutUint16(daBlockBytes[daBlockV7OffsetNumL1Messages:], b.numL1Messages) return daBlockBytes } // Decode populates the fields of a DABlock from a byte slice. 
func (b *daBlockV7) Decode(data []byte) error { - if len(data) != daBlockV7BlockContextByteSize { - return fmt.Errorf("block encoding is not blockContextByteSize bytes long expected %d, got %d", daBlockV7BlockContextByteSize, len(data)) + if len(data) != daBlockV7BlockContextEncodedLength { + return fmt.Errorf("block encoding is not blockContextByteSize bytes long expected %d, got %d", daBlockV7BlockContextEncodedLength, len(data)) } b.timestamp = binary.BigEndian.Uint64(data[daBlockV7OffsetTimestamp:daBlockV7OffsetBaseFee]) b.baseFee = new(big.Int).SetBytes(data[daBlockV7OffsetBaseFee:daBlockV7OffsetGasLimit]) - b.gasLimit = binary.BigEndian.Uint64(data[daBlockV7OffsetGasLimit:daBlockV7numTransactionsOffset]) - b.numTransactions = binary.BigEndian.Uint16(data[daBlockV7numTransactionsOffset:daBlockV7numL1MessagesOffset]) - b.numL1Messages = binary.BigEndian.Uint16(data[daBlockV7numL1MessagesOffset:]) + b.gasLimit = binary.BigEndian.Uint64(data[daBlockV7OffsetGasLimit:daBlockV7OffsetNumTransactions]) + b.numTransactions = binary.BigEndian.Uint16(data[daBlockV7OffsetNumTransactions:daBlockV7OffsetNumL1Messages]) + b.numL1Messages = binary.BigEndian.Uint16(data[daBlockV7OffsetNumL1Messages:]) return nil } diff --git a/encoding/da.go b/encoding/da.go index 2ea2025..b65e668 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -140,15 +140,35 @@ func (b *Block) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 { // NumL1MessagesNoSkipping returns the number of L1 messages in this block. // This method assumes that L1 messages can't be skipped. -func (b *Block) NumL1MessagesNoSkipping() uint16 { +func (b *Block) NumL1MessagesNoSkipping() (uint16, uint64, error) { var count uint16 + var prevQueueIndex *uint64 + for _, txData := range b.Transactions { - if txData.Type == types.L1MessageTxType { + if txData.Type != types.L1MessageTxType { + continue + } + + // If prevQueueIndex is nil, it means this is the first L1 message in the block. 
+ if prevQueueIndex == nil { + prevQueueIndex = &txData.Nonce count++ + continue } + + // Check if the queue index is consecutive. + if txData.Nonce != *prevQueueIndex+1 { + return 0, 0, fmt.Errorf("unexpected queue index: expected %d, got %d", *prevQueueIndex+1, txData.Nonce) + } + + count++ + prevQueueIndex = &txData.Nonce } - return count + if prevQueueIndex == nil { + return 0, 0, nil + } + return count, *prevQueueIndex, nil } // NumL2Transactions returns the number of L2 transactions in this block. @@ -683,7 +703,7 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin } else if !config.IsEuclid(blockTimestamp) { return CodecV4 } else { - // V5 is skipped, because it is only used for the special Euclid transition batch that we handle explicitly + // V5 is skipped, because it is only used for the special Euclid transition batch that we handle explicitly return CodecV6 } } From c9c1a4404e26e3272543b4cd3ef6d9894ee512ad Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Thu, 30 Jan 2025 15:20:44 +0800 Subject: [PATCH 11/47] add sanity checks for blob payload generation --- encoding/codecv7_types.go | 80 +++++++++++++++++++++++++++++++++++---- 1 file changed, 72 insertions(+), 8 deletions(-) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 9449590..0cc9727 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -9,6 +9,7 @@ import ( "github.com/klauspost/compress/zstd" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -44,7 +45,7 @@ const ( blobEnvelopeV7OffsetPayload = 5 ) -// Below is the encoding format for the batch metadata and blocks. +// Below is the encoding for blobPayloadV7. 
// * Field Bytes Type Index Comments // * initialL1MessageIndex 8 uint64 0 Queue index of the first L1 message contained in this batch // * initialL1MessageQueueHash 32 bytes32 8 Hash of the L1 message queue at the last message in the previous batch @@ -219,26 +220,57 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { copy(payloadBytes[blobPayloadV7OffsetInitialL1MessageQueue:blobPayloadV7OffsetLastL1MessageQueue], b.initialL1MessageQueueHash[:]) copy(payloadBytes[blobPayloadV7OffsetLastL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber], b.lastL1MessageQueueHash[:]) - blockNumber := b.blocks[0].Header.Number.Uint64() - binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL2BlockNumber:blobPayloadV7OffsetNumBlocks], blockNumber) + var initialL2BlockNumber uint64 + if len(b.blocks) > 0 { + initialL2BlockNumber = b.blocks[0].Header.Number.Uint64() + binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL2BlockNumber:blobPayloadV7OffsetNumBlocks], initialL2BlockNumber) + } binary.BigEndian.PutUint16(payloadBytes[blobPayloadV7OffsetNumBlocks:blobPayloadV7OffsetBlocks], uint16(len(b.blocks))) + l1MessageIndex := b.initialL1MessageIndex + var l1Messages []*types.L1MessageTx + var transactionBytes []byte - for _, block := range b.blocks { - numL1Messages, _, err := block.NumL1MessagesNoSkipping() + for i, block := range b.blocks { + // sanity check: block numbers are contiguous + if block.Header.Number.Uint64() != initialL2BlockNumber+uint64(i) { + return nil, fmt.Errorf("invalid block number: expected %d but got %d", initialL2BlockNumber+uint64(i), block.Header.Number.Uint64()) + } + + // sanity check (within NumL1MessagesNoSkipping): L1 message indices are contiguous within a block + numL1Messages, highestQueueIndex, err := block.NumL1MessagesNoSkipping() if err != nil { return nil, fmt.Errorf("failed to get numL1Messages: %w", err) } + // sanity check: L1 message indices are contiguous across blocks boundaries + if 
l1MessageIndex+uint64(numL1Messages) != highestQueueIndex { + return nil, fmt.Errorf("failed to sanity check L1 messages count: l1MessageIndex + numL1Messages != highestQueueIndex: %d + %d != %d", l1MessageIndex, numL1Messages, highestQueueIndex) + } + l1MessageIndex = highestQueueIndex + daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), numL1Messages) payloadBytes = append(payloadBytes, daBlock.Encode()...) // encode L2 txs as RLP and append to transactionBytes - for _, tx := range block.Transactions { - if tx.Type == types.L1MessageTxType { + for _, txData := range block.Transactions { + if txData.Type == types.L1MessageTxType { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, fmt.Errorf("failed to decode txData.Data: data=%v, err=%w", txData.Data, err) + } + + l1Messages = append(l1Messages, &types.L1MessageTx{ + QueueIndex: txData.Nonce, + Gas: txData.Gas, + To: txData.To, + Value: txData.Value.ToInt(), + Data: data, + // Sender: , TODO: is this needed? + }) continue } - rlpTxData, err := convertTxDataToRLPEncoding(tx) + rlpTxData, err := convertTxDataToRLPEncoding(txData) if err != nil { return nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } @@ -247,9 +279,41 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { } payloadBytes = append(payloadBytes, transactionBytes...) 
+ // sanity check: initialL1MessageQueueHash+apply(L1Messages) = lastL1MessageQueueHash + if applyL1Messages(b.initialL1MessageQueueHash, l1Messages) != b.lastL1MessageQueueHash { + return nil, fmt.Errorf("failed to sanity check L1 messages after applying all L1 messages: expected %s, got %s", applyL1Messages(b.initialL1MessageQueueHash, l1Messages), b.lastL1MessageQueueHash) + } + return payloadBytes, nil } +func applyL1Messages(initialQueueHash common.Hash, messages []*types.L1MessageTx) common.Hash { + rollingHash := initialQueueHash + for _, message := range messages { + rollingHash = applyL1Message(rollingHash, message) + } + + return rollingHash +} + +func applyL1Message(initialQueueHash common.Hash, message *types.L1MessageTx) common.Hash { + rollingHash := crypto.Keccak256Hash(initialQueueHash.Bytes(), types.NewTx(message).Hash().Bytes()) + + return encodeRollingHash(rollingHash) +} + +func encodeRollingHash(rollingHash common.Hash) common.Hash { + // clear last 36 bits + rollingHash[26] &= 0xF0 + rollingHash[27] = 0 + rollingHash[28] = 0 + rollingHash[29] = 0 + rollingHash[30] = 0 + rollingHash[31] = 0 + + return rollingHash +} + func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { if len(data) < blobPayloadV7MinEncodedLength { return nil, fmt.Errorf("invalid data length for blobPayloadV7, expected at least %d bytes but got %d", blobPayloadV7MinEncodedLength, len(data)) From e980b3df8488fd0269c5197ad105a37e94c59e58 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 31 Jan 2025 17:01:30 +0800 Subject: [PATCH 12/47] fix few small bugs uncovered by unit tests --- encoding/codecv7.go | 10 +++++++++- encoding/codecv7_types.go | 28 +++++++++++++++++++--------- encoding/da.go | 2 +- encoding/interfaces.go | 2 ++ 4 files changed, 31 insertions(+), 11 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 3effbab..c67d485 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ 
-129,7 +129,7 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b if len(blobBytes) > maxEffectiveBlobBytes { log.Error("ConstructBlob: Blob payload exceeds maximum size", "size", len(blobBytes), "blobBytes", hex.EncodeToString(blobBytes)) - return nil, common.Hash{}, nil, errors.New("blob exceeds maximum size") + return nil, common.Hash{}, nil, fmt.Errorf("blob exceeds maximum size: got %d, allowed %d", len(blobBytes), maxEffectiveBlobBytes) } // convert raw data to BLSFieldElements @@ -267,6 +267,14 @@ func (d *DACodecV7) CheckChunkCompressedDataCompatibility(_ *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. func (d *DACodecV7) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + if len(b.Chunks) != 0 { + return false, errors.New("batch must not contain any chunks") + } + + if len(b.Blocks) == 0 { + return false, errors.New("batch must contain at least one block") + } + return d.checkCompressedDataCompatibility(b) } diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 0cc9727..51608f8 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -201,6 +201,17 @@ type blobPayloadV7 struct { l2Transactions []types.Transactions } +func (b *blobPayloadV7) InitialL1MessageIndex() uint64 { + return b.initialL1MessageIndex +} +func (b *blobPayloadV7) InitialL1MessageQueueHash() common.Hash { + return b.initialL1MessageQueueHash +} + +func (b *blobPayloadV7) LastL1MessageQueueHash() common.Hash { + return b.lastL1MessageQueueHash +} + func (b *blobPayloadV7) Blocks() []DABlock { return b.daBlocks } @@ -209,10 +220,6 @@ func (b *blobPayloadV7) Transactions() []types.Transactions { return b.l2Transactions } -func (b *blobPayloadV7) InitialL1MessageIndex() uint64 { - return b.initialL1MessageIndex -} - func (b *blobPayloadV7) Encode() ([]byte, error) { payloadBytes := make([]byte, blobPayloadV7MinEncodedLength) @@ -243,10 +250,12 
@@ func (b *blobPayloadV7) Encode() ([]byte, error) { return nil, fmt.Errorf("failed to get numL1Messages: %w", err) } // sanity check: L1 message indices are contiguous across blocks boundaries - if l1MessageIndex+uint64(numL1Messages) != highestQueueIndex { - return nil, fmt.Errorf("failed to sanity check L1 messages count: l1MessageIndex + numL1Messages != highestQueueIndex: %d + %d != %d", l1MessageIndex, numL1Messages, highestQueueIndex) + if numL1Messages > 0 { + if l1MessageIndex+uint64(numL1Messages) != highestQueueIndex { + return nil, fmt.Errorf("failed to sanity check L1 messages count: l1MessageIndex + numL1Messages != highestQueueIndex: %d + %d != %d", l1MessageIndex, numL1Messages, highestQueueIndex) + } + l1MessageIndex = highestQueueIndex } - l1MessageIndex = highestQueueIndex daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), numL1Messages) payloadBytes = append(payloadBytes, daBlock.Encode()...) 
@@ -281,7 +290,7 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { // sanity check: initialL1MessageQueueHash+apply(L1Messages) = lastL1MessageQueueHash if applyL1Messages(b.initialL1MessageQueueHash, l1Messages) != b.lastL1MessageQueueHash { - return nil, fmt.Errorf("failed to sanity check L1 messages after applying all L1 messages: expected %s, got %s", applyL1Messages(b.initialL1MessageQueueHash, l1Messages), b.lastL1MessageQueueHash) + return nil, fmt.Errorf("failed to sanity check lastL1MessageQueueHash after applying all L1 messages: expected %s, got %s", applyL1Messages(b.initialL1MessageQueueHash, l1Messages), b.lastL1MessageQueueHash) } return payloadBytes, nil @@ -331,7 +340,7 @@ func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { } // decode DA Blocks from the blob - daBlocks := make([]DABlock, numBlocks) + daBlocks := make([]DABlock, 0, numBlocks) for i := uint64(0); i < uint64(numBlocks); i++ { daBlock := newDABlockV7WithNumber(initialL2BlockNumber + i) @@ -435,6 +444,7 @@ func (b *daBlockV7) Decode(data []byte) error { func decompressV7Bytes(compressedBytes []byte) ([]byte, error) { var res []byte + compressedBytes = append(zstdMagicNumber, compressedBytes...) 
r := bytes.NewReader(compressedBytes) zr, err := zstd.NewReader(r) if err != nil { diff --git a/encoding/da.go b/encoding/da.go index b65e668..71ad2c4 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -385,7 +385,7 @@ func TxsToTxsData(txs types.Transactions) []*types.TransactionData { // (require specified frame header and each block is compressed) func checkCompressedDataCompatibility(data []byte) error { if len(data) < 16 { - return fmt.Errorf("too small size (%x), what is it?", data) + return fmt.Errorf("too small size (0x%x), what is it?", data) } fheader := data[0] diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 84d345d..f72470a 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -45,6 +45,8 @@ type DABlobPayload interface { Blocks() []DABlock Transactions() []types.Transactions InitialL1MessageIndex() uint64 + InitialL1MessageQueueHash() common.Hash + LastL1MessageQueueHash() common.Hash } // Codec represents the interface for encoding and decoding DA-related structures. From 0e930c6d7169208723a7fd5f5996ddb5cc1a946b Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 31 Jan 2025 17:13:20 +0800 Subject: [PATCH 13/47] upgrade to latest l2geth version and add correct getter for CodecV7 in CodecFromConfig --- encoding/interfaces.go | 3 +-- encoding/interfaces_test.go | 32 ++++++++++++++++++++++++++++++++ go.mod | 2 +- go.sum | 2 ++ 4 files changed, 36 insertions(+), 3 deletions(-) diff --git a/encoding/interfaces.go b/encoding/interfaces.go index f72470a..29cab0e 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -118,8 +118,7 @@ func CodecFromVersion(version CodecVersion) (Codec, error) { // CodecFromConfig determines and returns the appropriate codec based on chain configuration, block number, and timestamp. 
func CodecFromConfig(chainCfg *params.ChainConfig, startBlockNumber *big.Int, startBlockTimestamp uint64) Codec { - // TODO: replace with correct fork - if chainCfg.IsEuclid(startBlockTimestamp) { + if chainCfg.IsEuclidV2(startBlockTimestamp) { return &DACodecV7{} } else if chainCfg.IsEuclid(startBlockTimestamp) { // V5 is skipped, because it is only used for the special Euclid transition batch that we handle explicitly diff --git a/encoding/interfaces_test.go b/encoding/interfaces_test.go index 82ce8b9..c965781 100644 --- a/encoding/interfaces_test.go +++ b/encoding/interfaces_test.go @@ -21,6 +21,9 @@ func TestCodecFromVersion(t *testing.T) { {"CodecV2", CodecV2, &DACodecV2{}, false}, {"CodecV3", CodecV3, &DACodecV3{}, false}, {"CodecV4", CodecV4, &DACodecV4{}, false}, + {"CodecV5", CodecV5, &DACodecV5{}, false}, + {"CodecV6", CodecV6, &DACodecV6{}, false}, + {"CodecV7", CodecV7, &DACodecV7{}, false}, {"InvalidCodec", CodecVersion(99), nil, true}, } @@ -45,6 +48,35 @@ func TestCodecFromConfig(t *testing.T) { timestamp uint64 want Codec }{ + { + name: "EuclidV2 active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + CurieBlock: big.NewInt(0), + DarwinTime: new(uint64), + DarwinV2Time: new(uint64), + EuclidTime: new(uint64), + EuclidV2Time: new(uint64), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV7{}, + }, + { + name: "Euclid active", + config: ¶ms.ChainConfig{ + LondonBlock: big.NewInt(0), + BernoulliBlock: big.NewInt(0), + CurieBlock: big.NewInt(0), + DarwinTime: new(uint64), + DarwinV2Time: new(uint64), + EuclidTime: new(uint64), + }, + blockNum: big.NewInt(0), + timestamp: 0, + want: &DACodecV6{}, + }, { name: "DarwinV2 active", config: ¶ms.ChainConfig{ diff --git a/go.mod b/go.mod index 3b098f2..bec6338 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( github.com/agiledragon/gomonkey/v2 v2.12.0 - github.com/scroll-tech/go-ethereum v1.10.14-0.20250103082839-ea3ec93d8c1e + 
github.com/scroll-tech/go-ethereum v1.10.14-0.20250129031936-44c72cd3fa47 github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index 48b0e95..86ee84a 100644 --- a/go.sum +++ b/go.sum @@ -80,6 +80,8 @@ github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjR github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/scroll-tech/go-ethereum v1.10.14-0.20250103082839-ea3ec93d8c1e h1:g8jtcGiHbjWYh/V7O245IDho3WfQT4CwEpBV+MhYDrg= github.com/scroll-tech/go-ethereum v1.10.14-0.20250103082839-ea3ec93d8c1e/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250129031936-44c72cd3fa47 h1:JbYYsGZMU1yAYsF/Ds6STMlItYy0SXj86nqdXC7nHyk= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250129031936-44c72cd3fa47/go.mod h1:8WbNuuUjie/LTdFXGGT7Z711MRW8Vv2zWLrcibg7hDc= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= From 5d200f3be8d022db06d69b47ac5b5b2da1d9e6dd Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 31 Jan 2025 17:13:43 +0800 Subject: [PATCH 14/47] fix linter warnings --- encoding/codecv7.go | 13 ------------- encoding/da.go | 5 +++++ encoding/da_test.go | 1 + 3 files changed, 6 insertions(+), 13 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index c67d485..c2aaa75 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -37,9 +37,6 @@ func (d *DACodecV7) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) if err != nil { return nil, fmt.Errorf("failed to calculate number of L1 messages: %w", err) } - if numL1Messages > math.MaxUint16 { - return nil, errors.New("number of L1 messages exceeds max uint16") - } if 
totalL1MessagePoppedBefore+uint64(numL1Messages) != highestQueueIndex { return nil, fmt.Errorf("failed to sanity check L1 messages count: totalL1MessagePoppedBefore + numL1Messages != highestQueueIndex: %d + %d != %d", totalL1MessagePoppedBefore, numL1Messages, highestQueueIndex) } @@ -218,16 +215,6 @@ func (d *DACodecV7) DecodeBlob(blob *kzg4844.Blob) (DABlobPayload, error) { } func (d *DACodecV7) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx) error { - payload, err := d.DecodeBlob(blob) - if err != nil { - return fmt.Errorf("failed to decode blob: %w", err) - } - - chunks = append(chunks, &DAChunkRawTx{ - Blocks: payload.Blocks(), - Transactions: payload.Transactions(), - }) - return nil } diff --git a/encoding/da.go b/encoding/da.go index 71ad2c4..3abe6de 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -3,7 +3,9 @@ package encoding import ( "bytes" "encoding/binary" + "errors" "fmt" + "math" "math/big" "slices" @@ -161,6 +163,9 @@ func (b *Block) NumL1MessagesNoSkipping() (uint16, uint64, error) { return 0, 0, fmt.Errorf("unexpected queue index: expected %d, got %d", *prevQueueIndex+1, txData.Nonce) } + if count == math.MaxUint16 { + return 0, 0, errors.New("number of L1 messages exceeds max uint16") + } count++ prevQueueIndex = &txData.Nonce } diff --git a/encoding/da_test.go b/encoding/da_test.go index f301ed0..5c5c150 100644 --- a/encoding/da_test.go +++ b/encoding/da_test.go @@ -21,6 +21,7 @@ func TestMain(m *testing.M) { log.Root().SetHandler(glogger) m.Run() + os.Exit(0) } func TestUtilFunctions(t *testing.T) { From 5292e3c330af2ce9762e408b478959553db406bc Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 31 Jan 2025 17:13:51 +0800 Subject: [PATCH 15/47] add unit tests --- encoding/codecv7_test.go | 918 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 918 insertions(+) create mode 100644 encoding/codecv7_test.go diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go new 
file mode 100644 index 0000000..3544bd0 --- /dev/null +++ b/encoding/codecv7_test.go @@ -0,0 +1,918 @@ +package encoding + +import ( + "encoding/hex" + "encoding/json" + "fmt" + "math/big" + "math/rand" + "strings" + "testing" + + "github.com/agiledragon/gomonkey/v2" + "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/common/hexutil" + "github.com/scroll-tech/go-ethereum/core/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// TestCodecV7DABlockEncodeDecode tests the encoding and decoding of daBlockV7. +func TestCodecV7DABlockEncodeDecode(t *testing.T) { + codecV7, err := CodecFromVersion(CodecV7) + require.NoError(t, err) + + testCases := []struct { + name string + blockJSONFile string + expectedEncode string + blockNumber uint64 + totalL1MessagePoppedBefore uint64 + err string + }{ + { + name: "Empty Block", + blockJSONFile: "", + expectedEncode: "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + blockNumber: 0, + }, + { + name: "Blocktrace 02", + blockJSONFile: "testdata/blockTrace_02.json", + expectedEncode: "0000000063807b2a0000000000000000000000000000000000000000000000000000000000001de9000355418d1e818400020000", + blockNumber: 2, + }, + { + name: "Blocktrace 03", + blockJSONFile: "testdata/blockTrace_03.json", + expectedEncode: "0000000063807b2d0000000000000000000000000000000000000000000000000000000000001a2c0003546c3cbb39e500010000", + blockNumber: 3, + }, + { + name: "Blocktrace 04 - 1 L1 message + 2 L2 tx", + blockJSONFile: "testdata/blockTrace_04.json", + expectedEncode: "00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a120000020001", + blockNumber: 13, + totalL1MessagePoppedBefore: 9, + }, + { + name: "Blocktrace 05 - 5 consecutive L1 messages", + blockJSONFile: "testdata/blockTrace_05.json", + expectedEncode: 
"00000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120000050005", + blockNumber: 17, + totalL1MessagePoppedBefore: 36, + }, + { + name: "Blocktrace 06 - 3 L1 messages with skipping (error)", + blockJSONFile: "testdata/blockTrace_06.json", + blockNumber: 17, + totalL1MessagePoppedBefore: 0, + err: "unexpected queue index", + }, + { + name: "Blocktrace 07 - 2 L1 messages with skipping (error)", + blockJSONFile: "testdata/blockTrace_07.json", + blockNumber: 17, + totalL1MessagePoppedBefore: 0, + err: "unexpected queue index", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var daBlock DABlock + if tc.blockJSONFile == "" { + daBlock = &daBlockV7{} + } else { + block := readBlockFromJSON(t, tc.blockJSONFile) + daBlock, err = codecV7.NewDABlock(block, tc.totalL1MessagePoppedBefore) + if tc.err == "" { + require.NoError(t, err) + } else { + require.ErrorContains(t, err, tc.err) + return + } + } + + encoded := daBlock.Encode() + require.Equal(t, tc.expectedEncode, hex.EncodeToString(encoded)) + + blockDecoded := newDABlockV7WithNumber(tc.blockNumber) + require.NoError(t, blockDecoded.Decode(encoded)) + assertEqualDABlocks(t, daBlock, blockDecoded) + }) + } +} + +// TestCodecV7DABatchHashEncodeDecode tests the hash, encoding and decoding of daBatchV7. +// It also tests the creation of daBatchV7 FromBytes and FromParams. 
+func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { + codecV7, err := CodecFromVersion(CodecV7) + require.NoError(t, err) + + testCases := []struct { + name string + batch *Batch + expectedEncode string + expectedHash string + creationErr string + }{ + { + name: "Empty Batch, creation error=no blocks", + batch: &Batch{}, + creationErr: "batch must contain at least one block", + }, + { + name: "Batch with 1 block,blocktrace 02", + batch: &Batch{ + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, + }, + expectedEncode: "070000000000000000018c671159176b607e2ec8333a37e1e58593fa6af330e533b45fa440b6b6399c0000000000000000000000000000000000000000000000000000000000000000", + expectedHash: "0xe43674f92aee5921602ccbfe555810ab3780b1df847eb7d8f52bce35ee42e709", + }, + { + name: "Batch with 1 block, blocktrace 06, creation error=L1 messages not consecutive", + batch: &Batch{ + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_06.json")}, + }, + creationErr: "unexpected queue index", + }, + { + name: "Batch with 4 blocks, blocktrace 02, 03, 04", + batch: &Batch{ + InitialL1MessageIndex: 9, + LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + Blocks: []*Block{ + readBlockFromJSON(t, "testdata/blockTrace_02.json"), + readBlockFromJSON(t, "testdata/blockTrace_03.json"), + replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4), + }, + }, + expectedEncode: "07000000000000000001feee34d945b6b7020630c7559303cc5a5d5b52be7111998d3e829948cf44390000000000000000000000000000000000000000000000000000000000000000", + expectedHash: "0xf547c2b7c24d0094a51c3e3eed36462a08d6c8558e3defd666d38717d0354cad", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + daBatchV7i, err := codecV7.NewDABatch(tc.batch) + if tc.creationErr != "" { + require.ErrorContains(t, err, tc.creationErr) + return + } + + require.NoError(t, err) + daBatchV7c := 
daBatchV7i.(*daBatchV7) + + encoded := daBatchV7c.Encode() + require.Equal(t, tc.expectedEncode, hex.EncodeToString(encoded)) + require.Equal(t, tc.expectedHash, daBatchV7c.Hash().Hex()) + + // test DABatchFromBytes + batchDecoded, err := codecV7.NewDABatchFromBytes(encoded) + batchDecodedV7 := batchDecoded.(*daBatchV7) + require.NoError(t, err) + require.Equal(t, daBatchV7c.version, batchDecodedV7.version) + require.Equal(t, daBatchV7c.batchIndex, batchDecodedV7.batchIndex) + require.Equal(t, daBatchV7c.blobVersionedHash, batchDecodedV7.blobVersionedHash) + require.Equal(t, daBatchV7c.parentBatchHash, batchDecodedV7.parentBatchHash) + require.Nil(t, batchDecodedV7.blob) + require.Nil(t, batchDecodedV7.blobBytes) + require.Equal(t, daBatchV7c.Hash(), batchDecoded.Hash()) + require.Equal(t, daBatchV7c.Encode(), batchDecoded.Encode()) + + // test DABatchFromParams + batchFromParams, err := codecV7.NewDABatchFromParams(daBatchV7c.batchIndex, daBatchV7c.blobVersionedHash, daBatchV7c.parentBatchHash) + require.NoError(t, err) + batchFromParamsV7 := batchFromParams.(*daBatchV7) + require.Equal(t, daBatchV7c.version, batchFromParamsV7.version) + require.Equal(t, daBatchV7c.batchIndex, batchFromParamsV7.batchIndex) + require.Equal(t, daBatchV7c.blobVersionedHash, batchFromParamsV7.blobVersionedHash) + require.Equal(t, daBatchV7c.parentBatchHash, batchFromParamsV7.parentBatchHash) + require.Nil(t, batchFromParamsV7.blob) + require.Nil(t, batchFromParamsV7.blobBytes) + require.Equal(t, daBatchV7c.Hash(), batchFromParams.Hash()) + require.Equal(t, daBatchV7c.Encode(), batchFromParams.Encode()) + }) + } +} + +func TestCodecV7BlobEncodingAndHashing(t *testing.T) { + codecV7, err := CodecFromVersion(CodecV7) + require.NoError(t, err) + require.EqualValues(t, CodecV7, codecV7.Version()) + + testCases := []struct { + name string + batch *Batch + creationErr string + expectedBlobEncode string + expectedBlobVersionedHash string + }{ + { + name: "Empty batch", + batch: &Batch{ + 
Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 0, + InitialL1MessageQueueHash: common.Hash{}, + LastL1MessageQueueHash: common.Hash{}, + Blocks: []*Block{}, + }, + creationErr: "batch must contain at least one block", + }, + { + name: "Batch with 1 block, blocktrace 02", + batch: &Batch{ + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 0, + InitialL1MessageQueueHash: common.Hash{}, + LastL1MessageQueueHash: common.Hash{}, + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, + }, + expectedBlobEncode: "0007f9000001606c009d0700240e000002000163807b2a1de9000355418d1e81008400020000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e002adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa7008e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19fea00cd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf871010100bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f007b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bc00ec5bd4aba684835996fc3f879380aac1c09c6eed32f105006032821d60094200a4b00e450116", + expectedBlobVersionedHash: "0x018c671159176b607e2ec8333a37e1e58593fa6af330e533b45fa440b6b6399c", + }, + { + name: "Batch with 1 blocks, blocktrace 04 - 1 L1 message + 1 L2 tx", + batch: &Batch{ + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 9, + InitialL1MessageQueueHash: common.Hash{}, + LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + Blocks: []*Block{replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, + }, + expectedBlobEncode: "00076400000120a6fd0200e4040000090097f93d31db48682539b6a399f76a8e00f13b04d40cdd2b12d611774000040001646b6e137a120000020001df0b8082005dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080800600b90000700281c9062076a0f105b", + expectedBlobVersionedHash: "0x019644784d15ae6866197bdede1eaa73a7b3beb6380413286039ee8b1be28c54", + }, + { + name: "Batch with 
3 blocks, blocktrace 02 + 03 + 04", + batch: &Batch{ + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 9, + InitialL1MessageQueueHash: common.Hash{}, + LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, + }, + expectedBlobEncode: "0007670c00016025170d6300a4960000090097f93d31db48682539b6a399f76a008ef13b04d40cdd2b12d61177400002000363807b2a1de9000355418d1e818400000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8710080843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca2008a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e9000cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c004d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a03f00b2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f2000bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483005996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a08110378151602083015160408085018051009151939592948301929184648211639083019060208201858179825181118200820188101794825250918201929091019080838360005b83c357818101518300820152602001620000a9565b50505050905090810190601f16f1578082038000516001836020036101000a0319168191508051604051939291900115012b0100460175015b01a39081015185519093508592508491620001c891600391850100906200026b565b508051620001de90600490602084506005805461ff00196000ff1990911660121716905550600680546001600160a01b0380881619928316001790925560078054928716929091169190911790556200023081620002556200010000600160b01b03191633021790555062000307915050565b60ff19166000ff929092565b828160011615610100020316600290049060005260206000200090601f016020900481019282601f10620002ae5780518380011785de0160010001855582156
20002de579182015b8202de57825182559160200191906001c100565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000200f1565b61119b80620003176000396000f3fe61001004361061010b576000350060e01c80635c975abb116100a257806395d89b411161007114610301578063009dc29fac14610309578063a457c2d714610335578063a9059cbb1461036157008063dd62ed3e1461038d5761010b565b1461029d57806370a08231146102a5005780638456cb59146102cb5780638e50817a146102d3313ce567116100de57001461021d578063395093511461023b5780633f4ba83a1461026757806340c1000f191461027106fdde0314610110578063095ea7b31461018d57806318160d00dd146101cd57806323b872e7575b6101186103bb565b6040805160208082520083518183015283519192839290830161015261013a61017f9250508091039000f35b6101b9600480360360408110156101a381351690602001356104519115001582525190819003602001d561046e60fd81169160208101359091169060400074565b6102256104fb60ff90921640025105046f610552565b005b61026f02008705a956610654d520bb3516610662067d56e90135166106d218610757031f0007b856034b085f77c7d5a308db565b6003805420601f600260001961010060000188161502019095169490940493840181900481028201810190925282815200606093909290918301828280156104475780601f1061041c57610100808354000402835291610447565b825b8154815260200180831161042a57829003601f0016820191565b600061046561045e610906565b848461090a565b506001920200548184f6565b6104f18461048d6104ec8560405180606080602861108560280091398a166000908152600160205260408120906104cb81019190915260400100600020549190610b51565b935460ff160511016000610522908116825260200080830193909352604091820120918c168152925290205490610be8565b60070016331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b1b00ddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090046000ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460600508282610ced909052604006ca0ddd900407260c6b6f6e6c7920466163746f00727960a0079283918216179091559390921660041561080808550e65086c250011176025006108968dd491824080832093909416825233831661094f5704010080806020018281038252602401806110f36024913960400191fd821661099400223d60228084166000819487168
084529482529182902085905581518581520091517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac800c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b83008383610f61565b610ac881265f60268685808220939093559084168152205400610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc37008daa952ba7f163c4a11628f55a4df523b3ef9291829003008184841115610b00e08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53616600654d6174683a206164646974696f6e206f766572666c6f7700610c9c147362001690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd38a00eae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f2000746865207a65726f72657373610d546000600254610d610255902054610d870080838393519293910e2d6101001790557f62e78cea01bee320cd4e420270b500ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610eb6008260000ef3221b85839020550f199082610fb540805182600091851691912000565b610f6cb07415610fb02a113c602a00610c428383401e7375627472616300815250fe7472616e736665726275726e20616d6f756e742065786365656473002062616c616e6365617070726f7665616c6c6f7766726f6d646563726561730065642062656c6f775061757361626c653a20746f6b656e7768696c652070610075736564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c003bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d9fa007ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00100a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c63003172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda52095d4004b8a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105008080808080814ba8d130a9149a111111110549d2741105c418e61894eb01120020132d0b639c42c818e2c84840818c8c282848929a39f03038fd0ddb85e2d20015e23516285d6a71d2c1a2a351201ca40facab44851c1fbf00022ce74078cf00901c9845e0306b08cd4a2a70724e1a69542f7aaa8fd851465eda364face7ee001a0754a6938078358317ba99a460e1db3cb338ac8411a449017b478e3c0a8e000987303e5ce118af05cc7d6837a22db87d617154940bcebb0b88ffe71519a900f199051c3311ca347ec728aa797d3ae1518f69a4ff48024239b0a5eb787a4d00ffa1364c67d6c42460452ec2dbd194485fb9412b92e
003774762efe150ac4b0058b7a96a03be266a9b0232cb472d206a773ff2364836708dc22081972f855b00edb3d2615426425652e36f684c12dd6db151ec85cb10129135858562adb69900680963a33684e492f40924e051899c9a79d7be012a8e287bd1686fa4546f80006053c5285d46f88c24e22d833b5f0e9cf69042bce0e25184cc90b64b03848400ae8ac12808269c14eaac86cca999bb30fa7d4dd43b8ac07d77c71554e34f5f00caeb72be43b6846bcec40b7f2edd062004352c7cd04b2049c0bcd36a11e22900d60708bcb5ba5cbcc788ea4489695753b85c9c08e8e8e2892a5cc9ae2a30c6009c666c8e3e8b0dfb4351f987aa79da99903218a5f6163147d0b638c045f05a006f1e540b9e4d6e4b60e8f0173ddbebc65d90ecc6f365be8933697ffaadd98b00a50e1aba18c30ce7714aa5ea14a61ef57f083002f22901d55ccd3e4a00d99d004ff8dc792f97644b039c85a6fb092e73113c172604746a3e876bc28c198aec007f57a2997e79c698b4a44bdccd959f16879a88844f5779600991113f0bf5d80052cf30d543afde04180d446e0be885d47f98da5a4a4552abd4956cce8c57bd0046d8ff1451ca8006bc5316ac418f06b831921e3fd9475d9a3e1e78556d09ae0029d5e6a44d56bf62cb3a8d6a40076fae05da1de31549471d4ea874406ebc7a001ef4236793fdc808270f9a6db446e2746a2c83d48156c9f7bdeddd3d1762510035840526201e073d608ca1b900cf8b416d0e", + expectedBlobVersionedHash: "0x01feee34d945b6b7020630c7559303cc5a5d5b52be7111998d3e829948cf4439", + }, + { + name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03", + batch: &Batch{ + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 36, + InitialL1MessageQueueHash: common.Hash{}, + LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, + }, + expectedBlobEncode: 
"0007500c0001600517556200049500002400faa13a9ed8937474556dd2ea36be00845199e823322cd63279a3ba300002000363807b2a1de9000355418d1e8184000002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e50001000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e900a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d008f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba60084835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783001197e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a0811037815160208301516040808501008051915193959294830192918464018211639083019060208201858179825100811182820188101794825250918201929091019080838360005b83c357818100015183820152602001620000a9565b50505050905090810190601f16f1578000820380516001836020036101000a031916819150805160405193929190011500012b01460175015b01a39081015185519093508592508491620001c891600300918501906200026b565b508051620001de90600490602084506005805461ff00001960ff1990911660121716905550600680546001600160a01b0380881619009283161790925560078054928716929091169190911790556200023081620000025562010000600160b01b03191633021790555062000307915050565b60ff00191660ff929092565b828160011615610100020316600290049060005260200060002090601f016020900481019282601f10620002ae5780518380011785de000160010185558215620002de579182015b8202de5782518255916020019190006001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010100620002f1565b61119b80620003176000396000f3fe61001004361061010b570060003560e01c80635c975abb116100a257806395d89b411161007114610301005780639dc29fac14610309578063a457c2d714610335578063a9059cbb1461000361578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114006102a55780638456cb59146102cb5780638e50817a146102d3313ce56711610000de571461021d578063395093511461023b5780633f4ba83a146102675780006340c10f19146
1027106fdde0314610110578063095ea7b31461018d5780630018160ddd146101cd57806323b872e7575b6101186103bb565b6040805160200080825283518183015283519192839290830161015261013a61017f9250508000910390f35b6101b9600480360360408110156101a381351690602001356104005191151582525190819003602001d561046e60fd81169160208101359091160090604074565b6102256104fb60ff90921640025105046f610552565b005b6100026f028705a956610654d520bb3516610662067d56e90135166106d21861070057031f07b856034b085f77c7d5a308db565b6003805420601f600260001961000100600188161502019095169490940493840181900481028201810190925200828152606093909290918301828280156104475780601f1061041c57610100008083540402835291610447565b825b8154815260200180831161042a5782900003601f16820191565b600061046561045e610906565b848461090a565b506000019202548184f6565b6104f18461048d6104ec8560405180606080602861100085602891398a166000908152600160205260408120906104cb81019190915200604001600020549190610b51565b935460ff160511016000610522908116820052602080830193909352604091820120918c168152925290205490610be856005b600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd0818005b1b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010000900460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600600064606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046610063746f727960a0079283918216179091559390921660041561080808550e6500086c2511176025006108968dd491824080832093909416825233831661094f0057040180806020018281038252602401806110f36024913960400191fd821600610994223d60228084166000819487168084529482529182902085905581510085815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b00200ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff8602361000a8b838383610f61565b610ac881265f60268685808220939093559084168100522054610af7908220409490945580905191937fddf252ad1be2c89b69c2b00068fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484110015610be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f00536166654d6174683a206164646974696f6e206f766572666c6f7700610c9c001473621690557f5db9ee0a495bf2e
6ff9c91a7834c1ba4fdd244a5e8aa4e53007bd38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e742000746f20746865207a65726f72657373610d546000600254610d61025590205400610d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e42000270b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad602100610eb68260000ef3221b85839020550f199082610fb540805182600091851600919120565b610f6cb07415610fb02a113c602a00610c428383401e7375627400726163815250fe7472616e736665726275726e20616d6f756e742065786365006564732062616c616e6365617070726f7665616c6c6f7766726f6d646563720065617365642062656c6f775061757361626c653a20746f6b656e7768696c650020706175736564a2646970667358221220e96342bec8f6c2bf72815a3999890073b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c0033000000001c5a77d9fa7ef466951b2f01f724bca3a5820b63a0e01209574554482063006f696e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e001737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc6230095bda52095d44b8a9af7814ba8c130a9143223222222122449930e1105c41800e61894eb01124013290b639c42c818e2c888502022230a0a92a4660ef030c800d41b020bd54b2b44d740a07429c5490a168c46d770903e68ae2a15b9317e2c0004189c033b3a4373603602c7ace1342b53011773d24ad36fd15b7de48eb2fc00d206f38470eefe38f032dd02c4ab13bc98c73b0513df4638d9dc250c234d8a00db32725451704a389ceee1bac7b82ea0d843bbf9684157cb16a2a2b4e01cb90080f85f5e91911a1f59c231134399a6ef0045b5ad4f473c6a518d741c49c051000e14e95a4f2fea3fd986399835d10958868b82513345a5afc0d18a04f001bb00a3ddf77028d625b493a92a017f16b545c199a5a3162c6ab71f7935920def0f003008e5e5f770ebf906771895899095d4eb377c27c98cb1d8d0f6c27908d9cd009ac3c2bf566b260d0901a236fee45cfa4420e08c12755dd8763f0214997f9200a331d158a9d600c1a48a53ba8cf01989ce9b5877f638601a2301f1ca8a47260032a76c971a0899b62a8e51c04c782d549e1a4252d3ebc2f55b6b82776a6ae200dd7df7888f3f6169afcbfe0ede12ae73265ef87369340081a9e1e7535f023b0002e6911653164f667d82c014aacbc56f8cea4efc98b23585c7c58d8056289e00abc2e5edea751698738fcde66721607f482a915105793b136706d5d1de43cc00d1a12d06b848bcf6860767c1c3925f101824fc45cf76b
0716729fbfb9c9bb300e74c029f9e42f6f253073f5d1c33c3953ba5ae76cadec3f32c8d35807b4acc00de57538f62c3b9e4733fffdecb24d1d2b067cf74ffc2e55d34cde9847cad2700e7089aa031378a45f80af1e9d2cfb08c8dbb14a279c333cc5065927074955b0096b01931b2508f2bf58c523decea3dc0684972fb805ef9fed3bebdba54c6b7004ae1cb66cf78d56b84a74ff1ba9c72c0e4650137a83200452391f0ff7254ba00e98f80b82a83e0daa4daa8dfc4cfabb6b893a918a8e179b580ba783c76e94400c209940e088f7bcf839eefec98b96ccbf44164370b38239db665ec3b5055990012c19674c19b3d92c3db4a1810cf82de3016100381b7d5a0e605", + expectedBlobVersionedHash: "0x014f8d28ab7c68a0a0872636c13b0f473044360dfe43b4f0ab93ce0977cd3a42", + }, + // test error cases + { + name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03, but with wrong initialL1MessageIndex", + batch: &Batch{ + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 21, + InitialL1MessageQueueHash: common.Hash{}, + LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, + }, + creationErr: "failed to sanity check L1 messages count", + }, + { + name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03, but with wrong (not consecutive) block number", + batch: &Batch{ + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 21, + InitialL1MessageQueueHash: common.Hash{}, + LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_05.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, + }, + creationErr: "invalid block number", + }, + { + name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 
messages only) + 03, but with wrong LastL1MessageQueueHash",
			batch: &Batch{
				Index:                     3,
				ParentBatchHash:           common.Hash{2},
				InitialL1MessageIndex:     36,
				InitialL1MessageQueueHash: common.Hash{1},
				LastL1MessageQueueHash:    common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"),
				Blocks:                    []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)},
			},
			creationErr: "failed to sanity check lastL1MessageQueueHash",
		},
		{
			name: "Batch with 3 blocks, blocktrace 02, 04 + 05 (L1 messages only), but with non-consecutive L1 messages number across blocks 04 and 05",
			batch: &Batch{
				Index:                     3,
				ParentBatchHash:           common.Hash{2},
				InitialL1MessageIndex:     9,
				InitialL1MessageQueueHash: common.Hash{1},
				LastL1MessageQueueHash:    common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"),
				Blocks:                    []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 4)},
			},
			creationErr: "failed to sanity check L1 messages count",
		},
		{
			name: "Batch with 3 blocks, blocktrace 02, 06, but with non-consecutive L1 messages number within block 06",
			batch: &Batch{
				Index:                     3,
				ParentBatchHash:           common.Hash{2},
				InitialL1MessageIndex:     9,
				InitialL1MessageQueueHash: common.Hash{1},
				LastL1MessageQueueHash:    common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"),
				Blocks:                    []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_06.json"), 3)},
			},
			creationErr: "unexpected queue index",
		},
	}

	// For each case: build the batch, compare the blob encoding and versioned
	// hash against pinned fixtures, then decode the blob back and verify the
	// payload metadata, blocks and L2 transactions round-trip.
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// NOTE(review): this var is redundant — the := below reuses daBatch
			// (Go redeclaration rule: err is the only new variable).
			var daBatch DABatch
			daBatch, err := codecV7.NewDABatch(tc.batch)
			if tc.creationErr != "" {
				require.ErrorContains(t, err, tc.creationErr)
				return
			}
			require.NoError(t, err)

			// check correctness of blob and blob hash
			// Trailing zero bytes of the blob are trimmed so fixtures only pin the used prefix.
			require.Equal(t, tc.expectedBlobEncode, strings.TrimRight(hex.EncodeToString(daBatch.Blob()[:]), "0"))
			require.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), daBatch.(*daBatchV7).blobVersionedHash)

			// check correctness of blob decoding: blobPayload metadata
			blobPayload, err := codecV7.DecodeBlob(daBatch.Blob())
			require.NoError(t, err)

			require.Equal(t, tc.batch.InitialL1MessageIndex, blobPayload.InitialL1MessageIndex())
			require.Equal(t, tc.batch.InitialL1MessageQueueHash, blobPayload.InitialL1MessageQueueHash())
			require.Equal(t, tc.batch.LastL1MessageQueueHash, blobPayload.LastL1MessageQueueHash())

			// check correctness of decoded blocks and transactions
			require.Equal(t, len(tc.batch.Blocks), len(blobPayload.Blocks()))
			decodedBlocks := blobPayload.Blocks()
			for i, block := range tc.batch.Blocks {
				numL1Messages, _, err := block.NumL1MessagesNoSkipping()
				require.NoError(t, err)

				daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(block.NumL2Transactions())+numL1Messages, numL1Messages)
				assertEqualDABlocks(t, daBlock, decodedBlocks[i])

				txDataDecoded := TxsToTxsData(blobPayload.Transactions()[i])
				var j int
				for _, txData := range block.Transactions {
					// Decoded blob contains only L2 transactions, L1 transactions need to be read from L1 (by using initialQueueIndex)
					// So in this test we skip checking them.
+ if txData.Type == types.L1MessageTxType { + continue + } + + assertEqualTransactionData(t, txData, txDataDecoded[j]) + j++ + } + } + }) + } +} + +func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { + codecV7, err := CodecFromVersion(CodecV7) + require.NoError(t, err) + + // Apply patches to functions to replace behavior for testing. + { + patches := gomonkey.NewPatches() + defer patches.Reset() + + patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) { + data, err := hexutil.Decode(txData.Data) + if err != nil { + return nil, err + } + return data, nil + }) + + patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { + return nil + }) + + // Always enable compression. + patches.ApplyPrivateMethod(codecV7, "checkCompressedDataCompatibility", func(b *Batch) (bool, error) { + return true, nil + }) + + patches.ApplyMethodFunc(codecV7, "CheckBatchCompressedDataCompatibility", func(b *Batch) (bool, error) { + return codecV7.(*DACodecV7).checkCompressedDataCompatibility(b) + }) + } + + repeat := func(element byte, count int) string { + result := make([]byte, 0, count) + for i := 0; i < count; i++ { + result = append(result, element) + } + return "0x" + common.Bytes2Hex(result) + } + + // Taking into consideration compression, we allow up to 5x of max blob bytes minus 5 byte for the blob envelope header. + // We subtract 82 bytes for the blobPayloadV7 metadata. + //compressableAvailableBytes := maxEffectiveBlobBytes*5 - 5 - 82 + maxAvailableBytesCompressable := 5*maxEffectiveBlobBytes - 5 - 82 + maxAvailableBytesIncompressable := maxEffectiveBlobBytes - 5 - 82 + // 52 bytes for each block as per daBlockV7 encoding. 
+ bytesPerBlock := 52 + + testCases := []struct { + name string + numBlocks int + txData []string + creationErr string + + expectedBlobVersionedHash string + }{ + { + name: "no blocks", + txData: []string{}, + expectedBlobVersionedHash: "0x01b2f5f5d7c4d370e1ec0d48fc0eca148c7a3a3d2cb60164a09c9bcea29051b9", + }, + { + name: "single block, single tx", + numBlocks: 1, + txData: []string{"0x010203"}, + expectedBlobVersionedHash: "0x01e8d5e04eae7327123212f9a549b0ee00514a50102919daa37f079c7c853685", + }, + { + name: "single block, multiple tx", + numBlocks: 1, + txData: []string{"0x010203", "0x040506", "0x070809"}, + expectedBlobVersionedHash: "0x0145c9f4f3954759b572df435c18cc805e06f935e540e2c89c687b232c2428d0", + }, + { + name: "multiple blocks, single tx per block", + numBlocks: 3, + txData: []string{"0x010203"}, + expectedBlobVersionedHash: "0x014879b661d2c0d65f52104f3f1545aed4179a5522b8fa40a00f538d3c26ccc8", + }, + { + name: "multiple blocks, multiple tx per block", + numBlocks: 3, + txData: []string{"0x010203", "0x040506", "0x070809"}, + expectedBlobVersionedHash: "0x01ae9bb3857e66609840d78e3d7ac09f4664ae8e8918da13a8d83e722586402a", + }, + { + name: "thousands of blocks, multiple tx per block", + numBlocks: 10000, + txData: []string{"0x010203", "0x040506", "0x070809"}, + expectedBlobVersionedHash: "0x01be8942fe0a3dc77590c9346866824f94f3e6a3b1774119c1e9720f763ede09", + }, + { + name: "single block, single tx, full blob random data -> data bigger compressed than uncompressed -> error blob exceeds maximum size", + numBlocks: 1, + txData: []string{generateRandomData(maxAvailableBytesIncompressable - bytesPerBlock)}, + creationErr: "blob exceeds maximum size", + }, + { + name: "2 blocks, single tx, full blob random data", + numBlocks: 2, + txData: []string{generateRandomData(maxAvailableBytesIncompressable/2 - bytesPerBlock*2)}, + expectedBlobVersionedHash: "0x01813145647585e490c7d14eab5aec876f2363954956e0b8d4658f211d5d1fbc", + }, + { + name: "single block, single tx, 
full blob repeat data", + numBlocks: 1, + txData: []string{repeat(0x12, maxAvailableBytesCompressable-bytesPerBlock)}, + expectedBlobVersionedHash: "0x01ac3403d7e4484fd5569c1042956cf2e5cadb03802603f4ce8ae890c4bc2414", + }, + { + name: "2 blocks, single 2, full blob random data", + numBlocks: 2, + txData: []string{repeat(0x12, maxAvailableBytesCompressable/2-bytesPerBlock*2), repeat(0x13, maxAvailableBytesCompressable/2-bytesPerBlock*2)}, + expectedBlobVersionedHash: "0x01c31afe47f81de670e7e8263d1e8e01e452a3afc296528ebb447895d9572238", + }, + { + name: "single block, single tx, full blob random data -> error because 1 byte too big", + numBlocks: 1, + txData: []string{generateRandomData(maxAvailableBytesIncompressable - bytesPerBlock + 1)}, + creationErr: "blob exceeds maximum size", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var blocks []*Block + for i := 0; i < tc.numBlocks; i++ { + block := &Block{ + Header: &types.Header{ + Number: big.NewInt(int64(i)), + }, + Transactions: []*types.TransactionData{}, + } + for _, data := range tc.txData { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + blocks = append(blocks, block) + } + + _, blobVersionedHash, _, err := codecV7.(*DACodecV7).constructBlob(&Batch{Blocks: blocks}) + if tc.creationErr != "" { + require.ErrorContains(t, err, tc.creationErr) + return + } + require.NoError(t, err) + require.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + }) + } +} + +func TestCodecV7BatchStandardTestCasesDisableCompression(t *testing.T) { + codecV7, err := CodecFromVersion(CodecV7) + require.NoError(t, err) + + // Apply patches to functions to replace behavior for testing. 
	{
		patches := gomonkey.NewPatches()
		// defer is function-scoped: the patches stay active for the whole test.
		defer patches.Reset()

		// Treat tx data as raw hex instead of RLP so fixtures can use arbitrary bytes.
		patches.ApplyFunc(convertTxDataToRLPEncoding, func(txData *types.TransactionData) ([]byte, error) {
			data, err := hexutil.Decode(txData.Data)
			if err != nil {
				return nil, err
			}
			return data, nil
		})

		patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error {
			return nil
		})

		// Always disable compression.
		patches.ApplyPrivateMethod(codecV7, "checkCompressedDataCompatibility", func(b *Batch) (bool, error) {
			return false, nil
		})

		patches.ApplyMethodFunc(codecV7, "CheckBatchCompressedDataCompatibility", func(b *Batch) (bool, error) {
			return codecV7.(*DACodecV7).checkCompressedDataCompatibility(b)
		})
	}

	// repeat returns "0x" followed by count copies of element, hex encoded.
	repeat := func(element byte, count int) string {
		result := make([]byte, 0, count)
		for i := 0; i < count; i++ {
			result = append(result, element)
		}
		return "0x" + common.Bytes2Hex(result)
	}

	// No compression. max blob bytes minus 5 byte for the blob envelope header.
	// We subtract 82 bytes for the blobPayloadV7 metadata.
	maxAvailableBytes := maxEffectiveBlobBytes - 5 - 82
	// 52 bytes for each block as per daBlockV7 encoding.
	bytesPerBlock := 52

	testCases := []struct {
		name        string
		numBlocks   int
		txData      []string
		creationErr string

		expectedBlobVersionedHash string
	}{
		{
			name:                      "no blocks",
			txData:                    []string{},
			expectedBlobVersionedHash: "0x0156a6430f1a7f819f41f4dfda7453c99693670447257f3f3b2f5a07beb47ae9",
		},
		{
			name:                      "single block, single tx",
			numBlocks:                 1,
			txData:                    []string{"0x010203"},
			expectedBlobVersionedHash: "0x011557bb7fdefb1a973d852d4f1c1ab46e46b5028a6f702821972d15a3a7bf36",
		},
		{
			name:                      "single block, multiple tx",
			numBlocks:                 1,
			txData:                    []string{"0x010203", "0x040506", "0x070809"},
			expectedBlobVersionedHash: "0x010506ab63a9d8a3221df8c10fcc83f5fc9c072928b5bbe179386832ac422fa4",
		},
		{
			name:                      "multiple blocks, single tx per block",
			numBlocks:                 3,
			txData:                    []string{"0x010203"},
			expectedBlobVersionedHash: "0x01e1c40d1f432836f394263e1f2a11c0704b2d3d94e99e48f589df45559b39c8",
		},
		{
			name:                      "multiple blocks, multiple tx per block",
			numBlocks:                 3,
			txData:                    []string{"0x010203", "0x040506", "0x070809"},
			expectedBlobVersionedHash: "0x01199ab5ee3c5c212843bffe27f07b0e85de1fc1f4e1fb8a7c4edbeb545397d6",
		},
		{
			name:        "thousands of blocks, multiple tx per block -> too big error",
			numBlocks:   10000,
			txData:      []string{"0x010203", "0x040506", "0x070809"},
			creationErr: "blob exceeds maximum size",
		},
		{
			name:                      "single block, single tx, full blob random data",
			numBlocks:                 1,
			txData:                    []string{generateRandomData(maxAvailableBytes - bytesPerBlock)},
			expectedBlobVersionedHash: "0x01f1aea1fe3f8a37ff505bf3aa5895d959c004087c4573bd99dcbfa035d5eb57",
		},
		{
			name:                      "2 blocks, single tx, full blob random data",
			numBlocks:                 2,
			txData:                    []string{generateRandomData(maxAvailableBytes/2 - bytesPerBlock*2)},
			expectedBlobVersionedHash: "0x016a8c8e6a56f7a2895b3c5f75dd34a4b8248e0b47d60fca576fa60c571a5812",
		},
		{
			name:      "single block, single tx, full blob repeat data",
			numBlocks: 1,
			txData:    []string{repeat(0x12, 
maxAvailableBytes-bytesPerBlock)}, + expectedBlobVersionedHash: "0x01ddad97c4d0eaa751c9e74d1a4a805da9434802ce61572ac0b5a87074230bc8", + }, + { + name: "2 blocks, 2 tx, full blob random data", + numBlocks: 2, + txData: []string{repeat(0x12, maxAvailableBytes/4-bytesPerBlock*2), repeat(0x13, maxAvailableBytes/4-bytesPerBlock*2)}, + expectedBlobVersionedHash: "0x0126e942bc804b28f9f33c481ef6235e0affcda37be0e4281645067ed2577fe3", + }, + { + name: "single block, single tx, full blob random data -> error because 1 byte too big", + numBlocks: 1, + txData: []string{generateRandomData(maxAvailableBytes - bytesPerBlock + 1)}, + creationErr: "blob exceeds maximum size", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var blocks []*Block + for i := 0; i < tc.numBlocks; i++ { + block := &Block{ + Header: &types.Header{ + Number: big.NewInt(int64(i)), + }, + Transactions: []*types.TransactionData{}, + } + for _, data := range tc.txData { + tx := &types.TransactionData{Type: 0xff, Data: data} + block.Transactions = append(block.Transactions, tx) + } + blocks = append(blocks, block) + } + + _, blobVersionedHash, _, err := codecV7.(*DACodecV7).constructBlob(&Batch{Blocks: blocks}) + if tc.creationErr != "" { + require.ErrorContains(t, err, tc.creationErr) + return + } + require.NoError(t, err) + require.Equal(t, common.HexToHash(tc.expectedBlobVersionedHash), blobVersionedHash) + }) + } +} + +func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { + codecV7, err := CodecFromVersion(CodecV7) + require.NoError(t, err) + + // empty batch + emptyBatch := &Batch{} + // bypass batch validation checks by calling checkCompressedDataCompatibility directly + compatible, err := codecV7.(*DACodecV7).checkCompressedDataCompatibility(emptyBatch) + assert.NoError(t, err) + assert.Equal(t, false, compatible) + + testCases := []struct { + name string + batch *Batch + expectCompatible bool + creationErr string + }{ + { + name: "Single Block 02", + 
batch: &Batch{ + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, + }, + expectCompatible: true, + }, + { + name: "Single Block 03", + batch: &Batch{ + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_03.json")}, + }, + expectCompatible: true, + }, + { + name: "Single Block 04", + batch: &Batch{ + InitialL1MessageIndex: 9, + LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_04.json")}, + }, + expectCompatible: true, + }, + { + name: "Single Block 05, only L1 messages", + batch: &Batch{ + InitialL1MessageIndex: 36, + LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_05.json")}, + }, + expectCompatible: true, + }, + { + name: "Single Block 06", + batch: &Batch{ + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_06.json")}, + }, + expectCompatible: false, + creationErr: "unexpected queue index", + }, + { + name: "Single Block 07", + batch: &Batch{ + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_07.json")}, + }, + expectCompatible: false, + creationErr: "unexpected queue index", + }, + { + name: "Multiple Blocks 02, 03, 04, 05", + batch: &Batch{ + InitialL1MessageIndex: 9, + LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + Blocks: []*Block{ + readBlockFromJSON(t, "testdata/blockTrace_02.json"), + readBlockFromJSON(t, "testdata/blockTrace_03.json"), + replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4), + }, + }, + expectCompatible: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + compatible, err = codecV7.CheckBatchCompressedDataCompatibility(tc.batch) + if tc.creationErr != "" { + require.ErrorContains(t, err, tc.creationErr) + return + } + 
require.NoError(t, err) + require.Equal(t, tc.expectCompatible, compatible) + }) + } +} + +func TestCodecV7DABatchJSONMarshalUnmarshal(t *testing.T) { + testCases := []struct { + name string + batch *daBatchV7 + expectedJSON string + }{ + { + name: "Case 01", + batch: &daBatchV7{ + version: CodecV7, + batchIndex: 293212, + blobVersionedHash: common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), + parentBatchHash: common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), + }, + expectedJSON: `{ + "version": 7, + "batch_index": 293212, + "blob_versioned_hash": "0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e", + "parent_batch_hash": "0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee" + }`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + data, err := json.Marshal(tc.batch) + require.NoError(t, err, "Failed to marshal daBatch") + + // Compare marshaled JSON + var expectedJSON, actualJSON map[string]interface{} + err = json.Unmarshal([]byte(tc.expectedJSON), &expectedJSON) + require.NoError(t, err, "Failed to unmarshal expected JSON string") + err = json.Unmarshal(data, &actualJSON) + require.NoError(t, err, "Failed to unmarshal actual JSON string") + + require.Equal(t, expectedJSON, actualJSON, "Marshaled JSON does not match expected JSON") + }) + } +} + +func TestDACodecV7JSONFromBytes(t *testing.T) { + codecV7, err := CodecFromVersion(CodecV7) + require.NoError(t, err) + + daBatch := &daBatchV7{ + version: CodecV7, + batchIndex: 293212, + blobVersionedHash: common.HexToHash("0x0120096572a3007f75c2a3ff82fa652976eae1c9428ec87ec258a8dcc84f488e"), + parentBatchHash: common.HexToHash("0xc37d3f6881f0ca6b02b1dc071483e02d0fe88cf2ff3663bb1ba9aa0dc034faee"), + } + + outputJSON, err := codecV7.JSONFromBytes(daBatch.Encode()) + require.NoError(t, err, "JSONFromBytes failed") + + var outputMap map[string]interface{} + err = 
json.Unmarshal(outputJSON, &outputMap) + require.NoError(t, err, "Failed to unmarshal output JSON") + + expectedFields := map[string]interface{}{ + "version": float64(daBatch.version), + "batch_index": float64(daBatch.batchIndex), + "blob_versioned_hash": daBatch.blobVersionedHash.Hex(), + "parent_batch_hash": daBatch.parentBatchHash.Hex(), + } + + require.Len(t, outputMap, len(expectedFields), "Unexpected number of fields in output") + for key, expectedValue := range expectedFields { + require.Equal(t, expectedValue, outputMap[key], fmt.Sprintf("Mismatch in field %s", key)) + } +} + +func assertEqualDABlocks(t *testing.T, expected, actual DABlock) { + require.Equal(t, expected.Number(), actual.Number()) + require.Equal(t, expected.NumTransactions(), actual.NumTransactions()) + require.Equal(t, expected.NumL1Messages(), actual.NumL1Messages()) + require.Equal(t, expected.Timestamp(), actual.Timestamp()) + assertEqualBigInt(t, expected.BaseFee(), actual.BaseFee()) + require.Equal(t, expected.GasLimit(), actual.GasLimit()) +} + +func assertEqualBigInt(t *testing.T, expected, actual *big.Int) { + if expected == nil && actual != nil { + require.EqualValues(t, 0, actual.Int64()) + } else if expected != nil && actual == nil { + require.EqualValues(t, expected.Int64(), 0) + } else { + require.EqualValuesf(t, 0, expected.Cmp(actual), "expected: %v, actual: %v", expected, actual) + } +} + +func assertEqualTransactionData(t *testing.T, expected, actual *types.TransactionData) { + require.Equal(t, expected.Type, actual.Type) + require.Equal(t, expected.Nonce, actual.Nonce) + require.Equal(t, expected.TxHash, actual.TxHash) + require.Equal(t, expected.Gas, actual.Gas) + assertEqualBigInt(t, expected.GasPrice.ToInt(), actual.GasPrice.ToInt()) + if expected.GasTipCap == nil { + assertEqualBigInt(t, expected.GasPrice.ToInt(), actual.GasTipCap.ToInt()) + } else { + assertEqualBigInt(t, expected.GasTipCap.ToInt(), actual.GasTipCap.ToInt()) + } + if expected.GasFeeCap == nil { + 
assertEqualBigInt(t, expected.GasPrice.ToInt(), actual.GasFeeCap.ToInt()) + } else { + assertEqualBigInt(t, expected.GasFeeCap.ToInt(), actual.GasFeeCap.ToInt()) + } + //require.Equal(t, expected.From, actual.From) + require.Equal(t, expected.To, actual.To) + // legacy tx chainID is derived from the V. However, since the signatures are not valid in the test data we skip this check. + if expected.Type != types.LegacyTxType { + assertEqualBigInt(t, expected.ChainId.ToInt(), actual.ChainId.ToInt()) + } + assertEqualBigInt(t, expected.Value.ToInt(), actual.Value.ToInt()) + require.Equal(t, expected.Data, actual.Data) + require.Equal(t, expected.IsCreate, actual.IsCreate) + require.ElementsMatch(t, expected.AccessList, actual.AccessList) + assertEqualBigInt(t, expected.V.ToInt(), actual.V.ToInt()) + assertEqualBigInt(t, expected.R.ToInt(), actual.R.ToInt()) + assertEqualBigInt(t, expected.S.ToInt(), actual.S.ToInt()) +} + +func replaceBlockNumber(block *Block, newNumber uint64) *Block { + block.Header.Number = new(big.Int).SetUint64(newNumber) + return block +} + +var seed int64 = 42 + +func generateRandomData(size int) string { + data := make([]byte, size) + + source := rand.NewSource(seed) + rng := rand.New(source) + + for i := range data { + data[i] = byte(rng.Intn(256)) + } + + return "0x" + common.Bytes2Hex(data) +} From 3cfed4308e892e7daa83a14774263579d30ac823 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 31 Jan 2025 17:15:25 +0800 Subject: [PATCH 16/47] go mod tidy --- go.sum | 2 -- 1 file changed, 2 deletions(-) diff --git a/go.sum b/go.sum index 86ee84a..d191489 100644 --- a/go.sum +++ b/go.sum @@ -78,8 +78,6 @@ github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod 
h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250103082839-ea3ec93d8c1e h1:g8jtcGiHbjWYh/V7O245IDho3WfQT4CwEpBV+MhYDrg= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250103082839-ea3ec93d8c1e/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8= github.com/scroll-tech/go-ethereum v1.10.14-0.20250129031936-44c72cd3fa47 h1:JbYYsGZMU1yAYsF/Ds6STMlItYy0SXj86nqdXC7nHyk= github.com/scroll-tech/go-ethereum v1.10.14-0.20250129031936-44c72cd3fa47/go.mod h1:8WbNuuUjie/LTdFXGGT7Z711MRW8Vv2zWLrcibg7hDc= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= From eed341fd51783e26e110fe69e3ae3c1362f786b1 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 31 Jan 2025 17:19:04 +0800 Subject: [PATCH 17/47] fix linter warnings --- encoding/codecv7.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index c2aaa75..810d7ce 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -105,7 +105,8 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b if enableCompression { // compressedPayloadBytes represents the compressed blob payload - compressedPayloadBytes, err := zstd.CompressScrollBatchBytes(payloadBytes) + var compressedPayloadBytes []byte + compressedPayloadBytes, err = zstd.CompressScrollBatchBytes(payloadBytes) if err != nil { return nil, common.Hash{}, nil, fmt.Errorf("failed to compress blob payload: %w", err) } From be6b422f605f2155a4922d7f43447165826b173d Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Mon, 3 Feb 2025 17:31:55 +0800 Subject: [PATCH 18/47] add function MessageQueueV2ApplyL1MessagesFromBlocks to compute the L1 message hash from a given set of blocks --- encoding/codecv7_types.go | 55 +++++-------------------------------- encoding/da.go | 57 +++++++++++++++++++++++++++++++++++++++ 2 files 
changed, 63 insertions(+), 49 deletions(-) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 51608f8..2953368 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -9,7 +9,6 @@ import ( "github.com/klauspost/compress/zstd" "github.com/scroll-tech/go-ethereum/common" - "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" @@ -235,8 +234,6 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { binary.BigEndian.PutUint16(payloadBytes[blobPayloadV7OffsetNumBlocks:blobPayloadV7OffsetBlocks], uint16(len(b.blocks))) l1MessageIndex := b.initialL1MessageIndex - var l1Messages []*types.L1MessageTx - var transactionBytes []byte for i, block := range b.blocks { // sanity check: block numbers are contiguous @@ -262,23 +259,6 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { // encode L2 txs as RLP and append to transactionBytes for _, txData := range block.Transactions { - if txData.Type == types.L1MessageTxType { - data, err := hexutil.Decode(txData.Data) - if err != nil { - return nil, fmt.Errorf("failed to decode txData.Data: data=%v, err=%w", txData.Data, err) - } - - l1Messages = append(l1Messages, &types.L1MessageTx{ - QueueIndex: txData.Nonce, - Gas: txData.Gas, - To: txData.To, - Value: txData.Value.ToInt(), - Data: data, - // Sender: , TODO: is this needed? - }) - continue - } - rlpTxData, err := convertTxDataToRLPEncoding(txData) if err != nil { return nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) @@ -289,38 +269,15 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { payloadBytes = append(payloadBytes, transactionBytes...) 
// sanity check: initialL1MessageQueueHash+apply(L1Messages) = lastL1MessageQueueHash - if applyL1Messages(b.initialL1MessageQueueHash, l1Messages) != b.lastL1MessageQueueHash { - return nil, fmt.Errorf("failed to sanity check lastL1MessageQueueHash after applying all L1 messages: expected %s, got %s", applyL1Messages(b.initialL1MessageQueueHash, l1Messages), b.lastL1MessageQueueHash) + computedLastL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(b.initialL1MessageQueueHash, b.blocks) + if err != nil { + return nil, fmt.Errorf("failed to apply L1 messages to initialL1MessageQueueHash: %w", err) } - - return payloadBytes, nil -} - -func applyL1Messages(initialQueueHash common.Hash, messages []*types.L1MessageTx) common.Hash { - rollingHash := initialQueueHash - for _, message := range messages { - rollingHash = applyL1Message(rollingHash, message) + if computedLastL1MessageQueueHash != b.lastL1MessageQueueHash { + return nil, fmt.Errorf("failed to sanity check lastL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedLastL1MessageQueueHash, b.lastL1MessageQueueHash) } - return rollingHash -} - -func applyL1Message(initialQueueHash common.Hash, message *types.L1MessageTx) common.Hash { - rollingHash := crypto.Keccak256Hash(initialQueueHash.Bytes(), types.NewTx(message).Hash().Bytes()) - - return encodeRollingHash(rollingHash) -} - -func encodeRollingHash(rollingHash common.Hash) common.Hash { - // clear last 36 bits - rollingHash[26] &= 0xF0 - rollingHash[27] = 0 - rollingHash[28] = 0 - rollingHash[29] = 0 - rollingHash[30] = 0 - rollingHash[31] = 0 - - return rollingHash + return payloadBytes, nil } func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { diff --git a/encoding/da.go b/encoding/da.go index 3abe6de..b0117d7 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -10,6 +10,7 @@ import ( "slices" "github.com/klauspost/compress/zstd" + "github.com/scroll-tech/go-ethereum/crypto" 
"github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common/hexutil" @@ -758,3 +759,59 @@ func GetBatchEnableCompression(codecVersion CodecVersion, batch *Batch) (bool, e return false, fmt.Errorf("unsupported codec version: %v", codecVersion) } } + +func MessageQueueV2ApplyL1MessagesFromBlocks(initialQueueHash common.Hash, blocks []*Block) (common.Hash, error) { + rollingHash := initialQueueHash + for _, block := range blocks { + for _, txData := range block.Transactions { + if txData.Type != types.L1MessageTxType { + continue + } + + data, err := hexutil.Decode(txData.Data) + if err != nil { + return common.Hash{}, fmt.Errorf("failed to decode txData.Data: data=%v, err=%w", txData.Data, err) + } + + l1Message := &types.L1MessageTx{ + QueueIndex: txData.Nonce, + Gas: txData.Gas, + To: txData.To, + Value: txData.Value.ToInt(), + Data: data, + // Sender: , TODO: is this needed? + } + + rollingHash = messageQueueV2ApplyL1Message(rollingHash, l1Message) + } + } + + return rollingHash, nil +} + +func messageQueueV2ApplyL1Messages(initialQueueHash common.Hash, messages []*types.L1MessageTx) common.Hash { + rollingHash := initialQueueHash + for _, message := range messages { + rollingHash = messageQueueV2ApplyL1Message(rollingHash, message) + } + + return rollingHash +} + +func messageQueueV2ApplyL1Message(initialQueueHash common.Hash, message *types.L1MessageTx) common.Hash { + rollingHash := crypto.Keccak256Hash(initialQueueHash.Bytes(), types.NewTx(message).Hash().Bytes()) + + return messageQueueV2EncodeRollingHash(rollingHash) +} + +func messageQueueV2EncodeRollingHash(rollingHash common.Hash) common.Hash { + // clear last 36 bits + rollingHash[26] &= 0xF0 + rollingHash[27] = 0 + rollingHash[28] = 0 + rollingHash[29] = 0 + rollingHash[30] = 0 + rollingHash[31] = 0 + + return rollingHash +} From d77916b6e33a59c91bab535333231604e1f75dc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A9ter=20Garamv=C3=B6lgyi?= Date: Mon, 3 Feb 2025 16:50:20 
+0100 Subject: [PATCH 19/47] fix lint and unit test errors --- encoding/codecv7_types.go | 3 +++ encoding/da.go | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 2953368..5430962 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -259,6 +259,9 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { // encode L2 txs as RLP and append to transactionBytes for _, txData := range block.Transactions { + if txData.Type == types.L1MessageTxType { + continue + } rlpTxData, err := convertTxDataToRLPEncoding(txData) if err != nil { return nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) diff --git a/encoding/da.go b/encoding/da.go index b0117d7..78df130 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -255,6 +255,7 @@ func convertTxDataToRLPEncoding(txData *types.TransactionData) ([]byte, error) { }) case types.L1MessageTxType: // L1MessageTxType is not supported + fallthrough default: return nil, fmt.Errorf("unsupported tx type: %d", txData.Type) } @@ -789,7 +790,7 @@ func MessageQueueV2ApplyL1MessagesFromBlocks(initialQueueHash common.Hash, block return rollingHash, nil } -func messageQueueV2ApplyL1Messages(initialQueueHash common.Hash, messages []*types.L1MessageTx) common.Hash { +func MessageQueueV2ApplyL1Messages(initialQueueHash common.Hash, messages []*types.L1MessageTx) common.Hash { rollingHash := initialQueueHash for _, message := range messages { rollingHash = messageQueueV2ApplyL1Message(rollingHash, message) From b71c047ffecc91720cd8110f9fac227c84ef3e29 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 4 Feb 2025 13:31:20 +0800 Subject: [PATCH 20/47] call checkCompressedDataCompatibility only once -> constructBlobPayload only once --- encoding/codecv7.go | 48 ++++++++++++++++++---------------------- encoding/codecv7_test.go | 36 +++++++++--------------------- 2 files changed, 31 insertions(+), 
53 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 810d7ce..e4d86e5 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -90,11 +90,6 @@ func (d *DACodecV7) NewDABatch(batch *Batch) (DABatch, error) { } func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []byte, error) { - enableCompression, err := d.CheckBatchCompressedDataCompatibility(batch) - if err != nil { - return nil, common.Hash{}, nil, fmt.Errorf("failed to check batch compressed data compatibility: %w", err) - } - blobBytes := make([]byte, blobEnvelopeV7OffsetPayload) blobBytes[blobEnvelopeV7OffsetVersion] = uint8(CodecV7) @@ -103,18 +98,12 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b return nil, common.Hash{}, nil, fmt.Errorf("failed to construct blob payload: %w", err) } + compressedPayloadBytes, enableCompression, err := d.checkCompressedDataCompatibility(payloadBytes) + if err != nil { + return nil, common.Hash{}, nil, fmt.Errorf("failed to check batch compressed data compatibility: %w", err) + } + if enableCompression { - // compressedPayloadBytes represents the compressed blob payload - var compressedPayloadBytes []byte - compressedPayloadBytes, err = zstd.CompressScrollBatchBytes(payloadBytes) - if err != nil { - return nil, common.Hash{}, nil, fmt.Errorf("failed to compress blob payload: %w", err) - } - // Check compressed data compatibility. 
- if err = checkCompressedDataCompatibility(compressedPayloadBytes); err != nil { - log.Error("ConstructBlob: compressed data compatibility check failed", "err", err, "payloadBytes", hex.EncodeToString(payloadBytes), "compressedPayloadBytes", hex.EncodeToString(compressedPayloadBytes)) - return nil, common.Hash{}, nil, err - } blobBytes[blobEnvelopeV7OffsetCompressedFlag] = 0x1 payloadBytes = compressedPayloadBytes } else { @@ -221,29 +210,24 @@ func (d *DACodecV7) DecodeTxsFromBlob(blob *kzg4844.Blob, chunks []*DAChunkRawTx // checkCompressedDataCompatibility checks the compressed data compatibility for a batch. // It constructs a blob payload, compresses the data, and checks the compressed data compatibility. -func (d *DACodecV7) checkCompressedDataCompatibility(batch *Batch) (bool, error) { - payloadBytes, err := d.constructBlobPayload(batch) - if err != nil { - return false, fmt.Errorf("failed to construct blob payload: %w", err) - } - +func (d *DACodecV7) checkCompressedDataCompatibility(payloadBytes []byte) ([]byte, bool, error) { compressedPayloadBytes, err := zstd.CompressScrollBatchBytes(payloadBytes) if err != nil { - return false, fmt.Errorf("failed to compress blob payload: %w", err) + return nil, false, fmt.Errorf("failed to compress blob payload: %w", err) } if err = checkCompressedDataCompatibility(compressedPayloadBytes); err != nil { log.Warn("Compressed data compatibility check failed", "err", err, "payloadBytes", hex.EncodeToString(payloadBytes), "compressedPayloadBytes", hex.EncodeToString(compressedPayloadBytes)) - return false, nil + return nil, false, nil } // check if compressed data is bigger or equal to the original data -> no need to compress if len(compressedPayloadBytes) >= len(payloadBytes) { log.Warn("Compressed data is bigger or equal to the original data", "payloadBytes", hex.EncodeToString(payloadBytes), "compressedPayloadBytes", hex.EncodeToString(compressedPayloadBytes)) - return false, nil + return nil, false, nil } - return 
true, nil + return compressedPayloadBytes, true, nil } // CheckChunkCompressedDataCompatibility checks the compressed data compatibility for a batch built from a single chunk. @@ -263,7 +247,17 @@ func (d *DACodecV7) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error return false, errors.New("batch must contain at least one block") } - return d.checkCompressedDataCompatibility(b) + payloadBytes, err := d.constructBlobPayload(b) + if err != nil { + return false, fmt.Errorf("failed to construct blob payload: %w", err) + } + + _, compatible, err := d.checkCompressedDataCompatibility(payloadBytes) + if err != nil { + return false, fmt.Errorf("failed to check batch compressed data compatibility: %w", err) + } + + return compatible, nil } // TODO: which of the Estimate* functions are needed? diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index 3544bd0..d12a375 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -13,7 +13,6 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -138,7 +137,7 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { creationErr: "unexpected queue index", }, { - name: "Batch with 4 blocks, blocktrace 02, 03, 04", + name: "Batch with 3 blocks, blocktrace 02, 03, 04", batch: &Batch{ InitialL1MessageIndex: 9, LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), @@ -405,15 +404,6 @@ func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { patches.ApplyFunc(checkCompressedDataCompatibility, func(_ []byte) error { return nil }) - - // Always enable compression. 
- patches.ApplyPrivateMethod(codecV7, "checkCompressedDataCompatibility", func(b *Batch) (bool, error) { - return true, nil - }) - - patches.ApplyMethodFunc(codecV7, "CheckBatchCompressedDataCompatibility", func(b *Batch) (bool, error) { - return codecV7.(*DACodecV7).checkCompressedDataCompatibility(b) - }) } repeat := func(element byte, count int) string { @@ -476,10 +466,10 @@ func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { expectedBlobVersionedHash: "0x01be8942fe0a3dc77590c9346866824f94f3e6a3b1774119c1e9720f763ede09", }, { - name: "single block, single tx, full blob random data -> data bigger compressed than uncompressed -> error blob exceeds maximum size", - numBlocks: 1, - txData: []string{generateRandomData(maxAvailableBytesIncompressable - bytesPerBlock)}, - creationErr: "blob exceeds maximum size", + name: "single block, single tx, full blob random data -> data bigger compressed than uncompressed", + numBlocks: 1, + txData: []string{generateRandomData(maxAvailableBytesIncompressable - bytesPerBlock)}, + expectedBlobVersionedHash: "0x01f1aea1fe3f8a37ff505bf3aa5895d959c004087c4573bd99dcbfa035d5eb57", }, { name: "2 blocks, single tx, full blob random data", @@ -557,12 +547,8 @@ func TestCodecV7BatchStandardTestCasesDisableCompression(t *testing.T) { }) // Always disable compression. 
- patches.ApplyPrivateMethod(codecV7, "checkCompressedDataCompatibility", func(b *Batch) (bool, error) { - return false, nil - }) - - patches.ApplyMethodFunc(codecV7, "CheckBatchCompressedDataCompatibility", func(b *Batch) (bool, error) { - return codecV7.(*DACodecV7).checkCompressedDataCompatibility(b) + patches.ApplyPrivateMethod(codecV7, "checkCompressedDataCompatibility", func(payloadBytes []byte) ([]byte, bool, error) { + return nil, false, nil }) } @@ -687,12 +673,10 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { codecV7, err := CodecFromVersion(CodecV7) require.NoError(t, err) - // empty batch - emptyBatch := &Batch{} // bypass batch validation checks by calling checkCompressedDataCompatibility directly - compatible, err := codecV7.(*DACodecV7).checkCompressedDataCompatibility(emptyBatch) - assert.NoError(t, err) - assert.Equal(t, false, compatible) + _, compatible, err := codecV7.(*DACodecV7).checkCompressedDataCompatibility([]byte{0}) + require.NoError(t, err) + require.Equal(t, false, compatible) testCases := []struct { name string From cbed8b2507be70f82e6cb4663668c56ed0c72edf Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 4 Feb 2025 14:24:32 +0800 Subject: [PATCH 21/47] address review comments --- encoding/codecv7.go | 17 ++++----- encoding/codecv7_test.go | 76 +++++++++++++++++++-------------------- encoding/codecv7_types.go | 5 ++- encoding/da.go | 11 +++--- encoding/da_test.go | 75 ++++++++++++++++++++++++++++++++++++-- 5 files changed, 129 insertions(+), 55 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index e4d86e5..24f1e39 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -91,7 +91,6 @@ func (d *DACodecV7) NewDABatch(batch *Batch) (DABatch, error) { func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []byte, error) { blobBytes := make([]byte, blobEnvelopeV7OffsetPayload) - blobBytes[blobEnvelopeV7OffsetVersion] = 
uint8(CodecV7) payloadBytes, err := d.constructBlobPayload(batch) if err != nil { @@ -103,15 +102,17 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b return nil, common.Hash{}, nil, fmt.Errorf("failed to check batch compressed data compatibility: %w", err) } + isCompressedFlag := uint8(0x0) if enableCompression { - blobBytes[blobEnvelopeV7OffsetCompressedFlag] = 0x1 + isCompressedFlag = 0x1 payloadBytes = compressedPayloadBytes - } else { - blobBytes[blobEnvelopeV7OffsetCompressedFlag] = 0x0 } sizeSlice := encodeSize3Bytes(uint32(len(payloadBytes))) + + blobBytes[blobEnvelopeV7OffsetVersion] = uint8(CodecV7) copy(blobBytes[blobEnvelopeV7OffsetByteSize:blobEnvelopeV7OffsetCompressedFlag], sizeSlice) + blobBytes[blobEnvelopeV7OffsetCompressedFlag] = isCompressedFlag blobBytes = append(blobBytes, payloadBytes...) if len(blobBytes) > maxEffectiveBlobBytes { @@ -179,12 +180,12 @@ func (d *DACodecV7) DecodeBlob(blob *kzg4844.Blob) (DABlobPayload, error) { } // read the data size - blobEnvelopeSize := decodeSize3Bytes(rawBytes[blobEnvelopeV7OffsetByteSize:blobEnvelopeV7OffsetCompressedFlag]) - if blobEnvelopeSize+blobEnvelopeV7OffsetPayload > uint32(len(rawBytes)) { - return nil, fmt.Errorf("blob envelope size exceeds the raw data size: %d > %d", blobEnvelopeSize, len(rawBytes)) + blobPayloadSize := decodeSize3Bytes(rawBytes[blobEnvelopeV7OffsetByteSize:blobEnvelopeV7OffsetCompressedFlag]) + if blobPayloadSize+blobEnvelopeV7OffsetPayload > uint32(len(rawBytes)) { + return nil, fmt.Errorf("blob envelope size exceeds the raw data size: %d > %d", blobPayloadSize, len(rawBytes)) } - payloadBytes := rawBytes[blobEnvelopeV7OffsetPayload : blobEnvelopeV7OffsetPayload+blobEnvelopeSize] + payloadBytes := rawBytes[blobEnvelopeV7OffsetPayload : blobEnvelopeV7OffsetPayload+blobPayloadSize] // read the compressed flag and decompress if needed compressed := rawBytes[blobEnvelopeV7OffsetCompressedFlag] diff --git a/encoding/codecv7_test.go 
b/encoding/codecv7_test.go index d12a375..0ede909 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -126,8 +126,8 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { batch: &Batch{ Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, }, - expectedEncode: "070000000000000000018c671159176b607e2ec8333a37e1e58593fa6af330e533b45fa440b6b6399c0000000000000000000000000000000000000000000000000000000000000000", - expectedHash: "0xe43674f92aee5921602ccbfe555810ab3780b1df847eb7d8f52bce35ee42e709", + expectedEncode: "07000000000000000001a40a4ae0fa894115c6d157d928ae6d5b95e3a38e39d0112086db7a5b94d21e0000000000000000000000000000000000000000000000000000000000000000", + expectedHash: "0xae204a7f43d50947ed9033bddac0e8dcebeace076b60c20c4fdfd0284f94f5d4", }, { name: "Batch with 1 block, blocktrace 06, creation error=L1 messages not consecutive", @@ -140,15 +140,15 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { name: "Batch with 3 blocks, blocktrace 02, 03, 04", batch: &Batch{ InitialL1MessageIndex: 9, - LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4), }, }, - expectedEncode: "07000000000000000001feee34d945b6b7020630c7559303cc5a5d5b52be7111998d3e829948cf44390000000000000000000000000000000000000000000000000000000000000000", - expectedHash: "0xf547c2b7c24d0094a51c3e3eed36462a08d6c8558e3defd666d38717d0354cad", + expectedEncode: "070000000000000000012979a80d8cadee682dedb27d9b4a78d2c71c032ea875f853892e9c5c42d0e90000000000000000000000000000000000000000000000000000000000000000", + expectedHash: "0x7e4ce5fa62b86def764d9cc03d197bc945b54dc9c814fc0b011293036cfd7848", }, 
} @@ -230,8 +230,8 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { LastL1MessageQueueHash: common.Hash{}, Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, }, - expectedBlobEncode: "0007f9000001606c009d0700240e000002000163807b2a1de9000355418d1e81008400020000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e002adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa7008e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19fea00cd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf871010100bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f007b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bc00ec5bd4aba684835996fc3f879380aac1c09c6eed32f105006032821d60094200a4b00e450116", - expectedBlobVersionedHash: "0x018c671159176b607e2ec8333a37e1e58593fa6af330e533b45fa440b6b6399c", + expectedBlobEncode: "00070000f901606c009d0700240e000002000163807b2a1de9000355418d1e81008400020000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e002adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa7008e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19fea00cd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf871010100bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f007b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bc00ec5bd4aba684835996fc3f879380aac1c09c6eed32f105006032821d60094200a4b00e450116", + expectedBlobVersionedHash: "0x01a40a4ae0fa894115c6d157d928ae6d5b95e3a38e39d0112086db7a5b94d21e", }, { name: "Batch with 1 blocks, blocktrace 04 - 1 L1 message + 1 L2 tx", @@ -240,11 +240,11 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { ParentBatchHash: common.Hash{}, InitialL1MessageIndex: 9, InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: 
[]*Block{replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobEncode: "00076400000120a6fd0200e4040000090097f93d31db48682539b6a399f76a8e00f13b04d40cdd2b12d611774000040001646b6e137a120000020001df0b8082005dc0941a258d17bf244c4df02d40343a7626a9d321e10580808080800600b90000700281c9062076a0f105b", - expectedBlobVersionedHash: "0x019644784d15ae6866197bdede1eaa73a7b3beb6380413286039ee8b1be28c54", + expectedBlobEncode: "00070000650120a6050300f40400000900c7436aaec2cfaf39d5be02a02c6ac200089ab264c3e0fd142db682f1c000040001646b6e137a120000020001df0b8000825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808006000039066e16790923b039d0f80258", + expectedBlobVersionedHash: "0x01b7709103b123307086d3b4db44cde8d96d1a94dcf80e195461330c65939872", }, { name: "Batch with 3 blocks, blocktrace 02 + 03 + 04", @@ -253,11 +253,11 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { ParentBatchHash: common.Hash{}, InitialL1MessageIndex: 9, InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobEncode: 
"0007670c00016025170d6300a4960000090097f93d31db48682539b6a399f76a008ef13b04d40cdd2b12d61177400002000363807b2a1de9000355418d1e818400000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8710080843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca2008a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e9000cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c004d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a03f00b2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f2000bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483005996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783119700e28080b915d260806040523480156200001157600080fd5b5060405162001400b2380380833981810160405260a08110378151602083015160408085018051009151939592948301929184648211639083019060208201858179825181118200820188101794825250918201929091019080838360005b83c357818101518300820152602001620000a9565b50505050905090810190601f16f1578082038000516001836020036101000a0319168191508051604051939291900115012b0100460175015b01a39081015185519093508592508491620001c891600391850100906200026b565b508051620001de90600490602084506005805461ff00196000ff1990911660121716905550600680546001600160a01b0380881619928316001790925560078054928716929091169190911790556200023081620002556200010000600160b01b03191633021790555062000307915050565b60ff19166000ff929092565b828160011615610100020316600290049060005260206000200090601f016020900481019282601f10620002ae5780518380011785de016001000185558215620002de579182015b8202de57825182559160200191906001c100565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000200f1565b61119b80620003176000396000f3fe61001004361061010b576000350060e01c80635c975abb116100a257806395d89b411161007114610301578063009dc29fac14610309578063a457c2d714610335578063a9059cbb1461036157008063dd62ed3e1461038d5761010b565b1461029d57806370a08231146102a5005780638456cb59146102cb5780638e50817a146102d3313ce567116100de57001461021d578063395093511461023b5780633f4ba83a1461026757806340c1000f19146102710
6fdde0314610110578063095ea7b31461018d57806318160d00dd146101cd57806323b872e7575b6101186103bb565b6040805160208082520083518183015283519192839290830161015261013a61017f9250508091039000f35b6101b9600480360360408110156101a381351690602001356104519115001582525190819003602001d561046e60fd81169160208101359091169060400074565b6102256104fb60ff90921640025105046f610552565b005b61026f02008705a956610654d520bb3516610662067d56e90135166106d218610757031f0007b856034b085f77c7d5a308db565b6003805420601f600260001961010060000188161502019095169490940493840181900481028201810190925282815200606093909290918301828280156104475780601f1061041c57610100808354000402835291610447565b825b8154815260200180831161042a57829003601f0016820191565b600061046561045e610906565b848461090a565b506001920200548184f6565b6104f18461048d6104ec8560405180606080602861108560280091398a166000908152600160205260408120906104cb81019190915260400100600020549190610b51565b935460ff160511016000610522908116825260200080830193909352604091820120918c168152925290205490610be8565b60070016331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b1b00ddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090046000ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460600508282610ced909052604006ca0ddd900407260c6b6f6e6c7920466163746f00727960a0079283918216179091559390921660041561080808550e65086c250011176025006108968dd491824080832093909416825233831661094f5704010080806020018281038252602401806110f36024913960400191fd821661099400223d60228084166000819487168084529482529182902085905581518581520091517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac800c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b83008383610f61565b610ac881265f60268685808220939093559084168152205400610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc37008daa952ba7f163c4a11628f55a4df523b3ef9291829003008184841115610b00e08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53616600654d6174683a206164646974696f6e206f766572666c6f7700610c9c147362001690557f5db9ee0a495bf2e6ff9c9
1a7834c1ba4fdd244a5e8aa4e537bd38a00eae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f2000746865207a65726f72657373610d546000600254610d610255902054610d870080838393519293910e2d6101001790557f62e78cea01bee320cd4e420270b500ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610eb6008260000ef3221b85839020550f199082610fb540805182600091851691912000565b610f6cb07415610fb02a113c602a00610c428383401e7375627472616300815250fe7472616e736665726275726e20616d6f756e742065786365656473002062616c616e6365617070726f7665616c6c6f7766726f6d646563726561730065642062656c6f775061757361626c653a20746f6b656e7768696c652070610075736564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c003bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d9fa007ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00100a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c63003172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda52095d4004b8a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105008080808080814ba8d130a9149a111111110549d2741105c418e61894eb01120020132d0b639c42c818e2c84840818c8c282848929a39f03038fd0ddb85e2d20015e23516285d6a71d2c1a2a351201ca40facab44851c1fbf00022ce74078cf00901c9845e0306b08cd4a2a70724e1a69542f7aaa8fd851465eda364face7ee001a0754a6938078358317ba99a460e1db3cb338ac8411a449017b478e3c0a8e000987303e5ce118af05cc7d6837a22db87d617154940bcebb0b88ffe71519a900f199051c3311ca347ec728aa797d3ae1518f69a4ff48024239b0a5eb787a4d00ffa1364c67d6c42460452ec2dbd194485fb9412b92e003774762efe150ac4b0058b7a96a03be266a9b0232cb472d206a773ff2364836708dc22081972f855b00edb3d2615426425652e36f684c12dd6db151ec85cb10129135858562adb69900680963a33684e492f40924e051899c9a79d7be012a8e287bd1686fa4546f80006053c5285d46f88c24e22d833b5f0e9cf69042bce0e25184cc90b64b03848400ae8ac12808269c14eaac86cca999bb30fa7d4dd43b8ac07d77c71554e34f5f00caeb72be43b6846bcec40b7f2edd062004352c7cd04b2049c0bcd36a11e22900d60708bcb5ba5cbcc788ea4489695753b85c9c08e8e8e2892a5cc9ae2a30c6009c666c8e3e8b0dfb4351f987aa79da99903218a5f6163
147d0b638c045f05a006f1e540b9e4d6e4b60e8f0173ddbebc65d90ecc6f365be8933697ffaadd98b00a50e1aba18c30ce7714aa5ea14a61ef57f083002f22901d55ccd3e4a00d99d004ff8dc792f97644b039c85a6fb092e73113c172604746a3e876bc28c198aec007f57a2997e79c698b4a44bdccd959f16879a88844f5779600991113f0bf5d80052cf30d543afde04180d446e0be885d47f98da5a4a4552abd4956cce8c57bd0046d8ff1451ca8006bc5316ac418f06b831921e3fd9475d9a3e1e78556d09ae0029d5e6a44d56bf62cb3a8d6a40076fae05da1de31549471d4ea874406ebc7a001ef4236793fdc808270f9a6db446e2746a2c83d48156c9f7bdeddd3d1762510035840526201e073d608ca1b900cf8b416d0e", - expectedBlobVersionedHash: "0x01feee34d945b6b7020630c7559303cc5a5d5b52be7111998d3e829948cf4439", + expectedBlobEncode: "0007000c6801602517156300b49600000900c7436aaec2cfaf39d5be02a02c6a00c2089ab264c3e0fd142db682f1c00002000363807b2a1de9000355418d1e810084000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8007180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0c00a28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf678100e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d0000c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a0003fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a08110378151602083015160408085018000519151939592948301929184648211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633
021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a007928391821617909155939092166004
1561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e100058080808080814ba8d130a9149a111111110549d2741105c418e61894eb01001240132dcb629c42c818e2c888502022230a0a92a4660ef030c8fc0ddb85e200d215e23516285d6a71d2c1a2a351201ca40faeab44851c1fbf00022ce7407800cf901cb445e0306b08cd4a2ae0724e1a69fa2f7aaa8fd851465eda370fade700ee1a0754a65b8078358317f2b9a460eadb2eb338ac8411a449057b478e
3c0a008e0987293e5ce118ae05ccbd6837b82de87a617154940bcebb0b88ffe7152700a9f199051c3311ca647ec728aa797d3ae1518f6aa4e348024239b0a5cb78ba00987e426d486756ee2460452ecaa3d1144d5f81412b92e003774763efe158ac004b52b7a96203be266a9b0232cb47ed216a773ff21a241bbabfc22080979fc200aded1bd0615426425652e36f784c92d96db151ec85cb10329135878563adb60099708967a33656729bf44924e051899c3ab3777f03148f5792a231d948a9de0007c1a68a51ba08e133d2c4db0577f63870f2430af1828b47113227da2e0d100032b92a06a32098f02854be1a42a786eec2e9fb35a97738caf6dd1d57188d3f007d29afe7f90ed912ae39132ffcb9741b8010d4f0f3292f811d01f34eab298800a7589f2030d5ea72f11ea3aa1327a64c4de1727122a0958b27aa7025bbaace0018739ab139fa2c36ec0f45a50f55f369672e65d092da47c48e56db72808bc1006bdb3cf8163c31b92c81d7e15f7ab6ae1b7740b28f67947924ce24fef45eb30017491d54e8e28719eee3946ad529583de2cb11ac09c8a704ec7335f5280e2800e97cc2e7cf7bb9245b1ae02c345dcb73998be05998b0def5f91c591330e65600b1c8bbc266faca3360d72a5d4a6edefc8c3854452460ba4a034b808c385fa800c7967a86a91e7af51660b410b97d40afa4fec3d49e522a995aa5ae6453663c00d46b84fc4ff1520634609db2201a6434008d91f0f1c73e8aa5e9f34056154b0070cd526d386d82fd155bd669540674f0e65aa05d301e9174d2e104a603eac600d1cb417f39838c4716b079e06ca3321aa7336319a40edc4a4cdfdb767a702d0012d526c29611c8d2c10817e39f4bc29d180ce6", + expectedBlobVersionedHash: "0x012979a80d8cadee682dedb27d9b4a78d2c71c032ea875f853892e9c5c42d0e9", }, { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03", @@ -266,11 +266,11 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { ParentBatchHash: common.Hash{2}, InitialL1MessageIndex: 36, InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + LastL1MessageQueueHash: common.HexToHash("0xb3e7e7f02af64f130535f65b0e7375d4ad3c43c2f05ad3dbe7402ab000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), 
replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, }, - expectedBlobEncode: "0007500c0001600517556200049500002400faa13a9ed8937474556dd2ea36be00845199e823322cd63279a3ba300002000363807b2a1de9000355418d1e8184000002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e50001000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb000ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf670081e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce6400d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e900a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d008f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba60084835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783001197e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a0811037815160208301516040808501008051915193959294830192918464018211639083019060208201858179825100811182820188101794825250918201929091019080838360005b83c357818100015183820152602001620000a9565b50505050905090810190601f16f1578000820380516001836020036101000a031916819150805160405193929190011500012b01460175015b01a39081015185519093508592508491620001c891600300918501906200026b565b508051620001de90600490602084506005805461ff00001960ff1990911660121716905550600680546001600160a01b0380881619009283161790925560078054928716929091169190911790556200023081620000025562010000600160b01b03191633021790555062000307915050565b60ff00191660ff929092565b828160011615610100020316600290049060005260200060002090601f016020900481019282601f10620002ae5780518380011785de000160010185558215620002de579182015b8202de5782518255916020019190006001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010100620002f1565b61119b80620003176000396000f3fe61001004361061010b570060003560e01c80635c975abb116100a257806395d89b411161007114610301005780639dc29fac14610309578063a457c2d714610335578063a9059cbb1461000361578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114006102a55780638456cb59146102cb5780638e508
17a146102d3313ce56711610000de571461021d578063395093511461023b5780633f4ba83a146102675780006340c10f191461027106fdde0314610110578063095ea7b31461018d5780630018160ddd146101cd57806323b872e7575b6101186103bb565b6040805160200080825283518183015283519192839290830161015261013a61017f9250508000910390f35b6101b9600480360360408110156101a381351690602001356104005191151582525190819003602001d561046e60fd81169160208101359091160090604074565b6102256104fb60ff90921640025105046f610552565b005b6100026f028705a956610654d520bb3516610662067d56e90135166106d21861070057031f07b856034b085f77c7d5a308db565b6003805420601f600260001961000100600188161502019095169490940493840181900481028201810190925200828152606093909290918301828280156104475780601f1061041c57610100008083540402835291610447565b825b8154815260200180831161042a5782900003601f16820191565b600061046561045e610906565b848461090a565b506000019202548184f6565b6104f18461048d6104ec8560405180606080602861100085602891398a166000908152600160205260408120906104cb81019190915200604001600020549190610b51565b935460ff160511016000610522908116820052602080830193909352604091820120918c168152925290205490610be856005b600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd0818005b1b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010000900460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600600064606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046610063746f727960a0079283918216179091559390921660041561080808550e6500086c2511176025006108968dd491824080832093909416825233831661094f0057040180806020018281038252602401806110f36024913960400191fd821600610994223d60228084166000819487168084529482529182902085905581510085815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b00200ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff8602361000a8b838383610f61565b610ac881265f60268685808220939093559084168100522054610af7908220409490945580905191937fddf252ad1be2c89b69c2b00068fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484110015610be08381815191508051900ba50b8d0bd2fd900300828201610
c421b7f00536166654d6174683a206164646974696f6e206f766572666c6f7700610c9c001473621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e53007bd38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e742000746f20746865207a65726f72657373610d546000600254610d61025590205400610d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e42000270b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad602100610eb68260000ef3221b85839020550f199082610fb540805182600091851600919120565b610f6cb07415610fb02a113c602a00610c428383401e7375627400726163815250fe7472616e736665726275726e20616d6f756e742065786365006564732062616c616e6365617070726f7665616c6c6f7766726f6d646563720065617365642062656c6f775061757361626c653a20746f6b656e7768696c650020706175736564a2646970667358221220e96342bec8f6c2bf72815a3999890073b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c0033000000001c5a77d9fa7ef466951b2f01f724bca3a5820b63a0e01209574554482063006f696e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e001737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc6230095bda52095d44b8a9af7814ba8c130a9143223222222122449930e1105c41800e61894eb01124013290b639c42c818e2c888502022230a0a92a4660ef030c800d41b020bd54b2b44d740a07429c5490a168c46d770903e68ae2a15b9317e2c0004189c033b3a4373603602c7ace1342b53011773d24ad36fd15b7de48eb2fc00d206f38470eefe38f032dd02c4ab13bc98c73b0513df4638d9dc250c234d8a00db32725451704a389ceee1bac7b82ea0d843bbf9684157cb16a2a2b4e01cb90080f85f5e91911a1f59c231134399a6ef0045b5ad4f473c6a518d741c49c051000e14e95a4f2fea3fd986399835d10958868b82513345a5afc0d18a04f001bb00a3ddf77028d625b493a92a017f16b545c199a5a3162c6ab71f7935920def0f003008e5e5f770ebf906771895899095d4eb377c27c98cb1d8d0f6c27908d9cd009ac3c2bf566b260d0901a236fee45cfa4420e08c12755dd8763f0214997f9200a331d158a9d600c1a48a53ba8cf01989ce9b5877f638601a2301f1ca8a47260032a76c971a0899b62a8e51c04c782d549e1a4252d3ebc2f55b6b82776a6ae200dd7df7888f3f6169afcbfe0ede12ae73265ef87369340081a9e1e7535f023b0002e6911653164f667d82c014aacbc56f8cea4efc98b23585c7c58d8056289e00abc2e5e
dea751698738fcde66721607f482a915105793b136706d5d1de43cc00d1a12d06b848bcf6860767c1c3925f101824fc45cf76b0716729fbfb9c9bb300e74c029f9e42f6f253073f5d1c33c3953ba5ae76cadec3f32c8d35807b4acc00de57538f62c3b9e4733fffdecb24d1d2b067cf74ffc2e55d34cde9847cad2700e7089aa031378a45f80af1e9d2cfb08c8dbb14a279c333cc5065927074955b0096b01931b2508f2bf58c523decea3dc0684972fb805ef9fed3bebdba54c6b7004ae1cb66cf78d56b84a74ff1ba9c72c0e4650137a83200452391f0ff7254ba00e98f80b82a83e0daa4daa8dfc4cfabb6b893a918a8e179b580ba783c76e94400c209940e088f7bcf839eefec98b96ccbf44164370b38239db665ec3b5055990012c19674c19b3d92c3db4a1810cf82de3016100381b7d5a0e605", - expectedBlobVersionedHash: "0x014f8d28ab7c68a0a0872636c13b0f473044360dfe43b4f0ab93ce0977cd3a42", + expectedBlobEncode: "0007000c4f016005174d6200049500002400b3e7e7f02af64f130535f65b0e730075d4ad3c43c2f05ad3dbe7402ab00002000363807b2a1de9000355418d1e8100840002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e50001000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf6800e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace2008d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4ab00a684835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380833981810160405260a0811037815160208301516040808500018051915193959294830192918464018211639083019060208201858179820051811182820188101794825250918201929091019080838360005b83c357810081015183820152602001620000a9565b50505050905090810190601f16f1570080820380516001836020036101000a031916819150805160405193929190010015012b01460175015b01a39081015185519093508592508491620001c891600003918501906200026b565b508051620001de9060049060208450600580546100ff001960ff1990911660121716905550600680546001600160a01b0380881600199283161790925560078054928716929091
169190911790556200023081620000025562010000600160b01b03191633021790555062000307915050565b6000ff191660ff929092565b828160011615610100020316600290049060005260002060002090601f016020900481019282601f10620002ae578051838001178500de0160010185558215620002de579182015b8202de5782518255916020019100906001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010001620002f1565b61119b80620003176000396000f3fe61001004361061010b005760003560e01c80635c975abb116100a257806395d89b411161007114610300015780639dc29fac14610309578063a457c2d714610335578063a9059cbb1400610361578063dd62ed3e1461038d5761010b565b1461029d57806370a0823100146102a55780638456cb59146102cb5780638e50817a146102d3313ce56711006100de571461021d578063395093511461023b5780633f4ba83a146102675700806340c10f191461027106fdde0314610110578063095ea7b31461018d5780006318160ddd146101cd57806323b872e7575b6101186103bb565b6040805160002080825283518183015283519192839290830161015261013a61017f9250500080910390f35b6101b9600480360360408110156101a381351690602001356100045191151582525190819003602001d561046e60fd81169160208101359091001690604074565b6102256104fb60ff90921640025105046f610552565b005b0061026f028705a956610654d520bb3516610662067d56e90135166106d21861000757031f07b856034b085f77c7d5a308db565b6003805420601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c57610100008083540402835291610447565b825b8154815260200180831161042a5782009003601f16820191565b600061046561045e610906565b848461090a565b500060019202548184f6565b6104f18461048d6104ec8560405180606080602861001085602891398a166000908152600160205260408120906104cb81019190910052604001600020549190610b51565b935460ff160511016000610522908116008252602080830193909352604091820120918c168152925290205490610be800565b600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd0800185b1b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010000900460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600006064606508282610ced909052604006ca0ddd900407260c6b6f
6e6c792046006163746f727960a0079283918216179091559390921660041561080808550e0065086c2511176025006108968dd49182408083209390941682523383166109004f57040180806020018281038252602401806110f36024913960400191fd820016610994223d60228084166000819487168084529482529182902085905581005185815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e005b200ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff8602300610a8b838383610f61565b610ac881265f60268685808220939093559084160081522054610af7908220409490945580905191937fddf252ad1be2c89b69c200b068fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484001115610be08381815191508051900ba50b8d0bd2fd900300828201610c421b007f536166654d6174683a206164646974696f6e206f766572666c6f7700610c009c1473621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e00537bd38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e740020746f20746865207a65726f72657373610d546000600254610d61025590200054610d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e00420270b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad600021610eb68260000ef3221b85839020550f199082610fb540805182600091850016919120565b610f6cb07415610fb02a113c602a00610c428383401e7375620074726163815250fe7472616e736665726275726e20616d6f756e742065786300656564732062616c616e6365617070726f7665616c6c6f7766726f6d646563007265617365642062656c6f775061757361626c653a20746f6b656e7768696c006520706175736564a2646970667358221220e96342bec8f6c2bf72815a3999008973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00330000001c5a77d9fa7ef466951b2f01f724bca3a5820b63a0e01209574554482063006f696e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e001737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc6230095bda52095d44b8a9af7814ba8c130a9143223222222122449930e1105c41800e61894eb0112401329cb622042c818e2c888502022330a0a92a4660ef030c800d41b020bd54b2b52d740a07429c5650a708c469770741f38578d8a1c181f160002fcce819dced01cd046489a753bcd9a5460c71c9ed2a85af4562fbba326bf00b460ee14ce7d8f435fa60f80784f8217f378a760e2db08177bbf8461a44971005b46
8e2a0a4e0987d33d5cf718d505447668371e2db86dd9425494169c23170010ffcb2b4e52e320a370cc6c28fbf4d1a0a8dbfa28c5a316d548c791841fe50040919ef57441fd956d4883596d9d80325c644c9b29277de58e5624800fd81d00edbe8743b12ea14599aa08f8b0a825cace2c1db5bfa87dfdc8a3916c7e25800041282fbf875bcf37b8c348265c565259bf213a49368cc5a0b617de4348dfac001d16fab58e66929018206a034fce4b9f08023628916c86d48a23408b99c73e001a87f64a4d9d1346aa28a58b089f91f2bcd1ba73dc693b18168865563c3291003965bbd440c8b455498c62cc84d342c5d4102035b72e6cdf5ab378c7e624de00dd778ff8f15b8cedf5c9dfc15bc2b59cc912fe5c1a0d40626a18f8e84ba04200c05c72c688c5cbaccf10d84545b9781ba3dd893ca6604de1717123a0158ae700aa7079bb74d102f3ecb18df95908ec8f924a32aa4cdeceeccc603ada6f8821005a68cb001713afcdf0e02d78a0e410040e097fd1b31d6cdc51caae3eefe6ee003993c2a7af90bde0d4c14f17cfcc70e64e29a99d52ee51fc8b4604f43d41a600eaab794791e14ff27b3fc7de5bfa69e9deb3cd7477e9c22e1acde884264d2500e7089aa03137827df80af1e9d2cfb08c8dbb14a2f9c153cc5036495074f55b0096a019f15da8c7957a46a9de77f51860b425b91d40af7cff31df5e5d2aea5b00a5f065b3677cd66bc4a64f115d6e39e0f0b2a01b943280a2919cf06339caba00e944405dd582e0da526dbedf64c5ab86bb80a998beb1bc5a48dd793cba742000e104490784c7b9b7833eee8418b88ccaf441e4370b38239db665ec3b5055990012c1b67be8dd1ae1c0b61e0a64bca0b78bc73730ef4b0d065e", + expectedBlobVersionedHash: "0x01542d50fc3164320026dbcccc104931c29c363c30874701d1b942fa8aa90795", }, // test error cases { @@ -433,61 +433,61 @@ func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { { name: "no blocks", txData: []string{}, - expectedBlobVersionedHash: "0x01b2f5f5d7c4d370e1ec0d48fc0eca148c7a3a3d2cb60164a09c9bcea29051b9", + expectedBlobVersionedHash: "0x01c3d5ebe49678dcde7aa2e90b6bd451a11c2718b40aa739aa5f626550435389", }, { name: "single block, single tx", numBlocks: 1, txData: []string{"0x010203"}, - expectedBlobVersionedHash: "0x01e8d5e04eae7327123212f9a549b0ee00514a50102919daa37f079c7c853685", + expectedBlobVersionedHash: 
"0x013b5be233a9a3ef576049b3dbd81b71f62ca2c99fde0e74dfbed59ba0e45bd2", }, { name: "single block, multiple tx", numBlocks: 1, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x0145c9f4f3954759b572df435c18cc805e06f935e540e2c89c687b232c2428d0", + expectedBlobVersionedHash: "0x016591dd97004a0bfd84efee01dd5cb10c477e4300f34dedf428d2cd154fc69d", }, { name: "multiple blocks, single tx per block", numBlocks: 3, txData: []string{"0x010203"}, - expectedBlobVersionedHash: "0x014879b661d2c0d65f52104f3f1545aed4179a5522b8fa40a00f538d3c26ccc8", + expectedBlobVersionedHash: "0x01890ba0b9db428ca5545d1a58e5ba7735f92395e3dd7811ca1f652280bb1d3f", }, { name: "multiple blocks, multiple tx per block", numBlocks: 3, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x01ae9bb3857e66609840d78e3d7ac09f4664ae8e8918da13a8d83e722586402a", + expectedBlobVersionedHash: "0x014a47d175874f5b10d95deabe0a3b10ea2bdbc5080ea33b9f1a16a4d3c7395f", }, { name: "thousands of blocks, multiple tx per block", numBlocks: 10000, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x01be8942fe0a3dc77590c9346866824f94f3e6a3b1774119c1e9720f763ede09", + expectedBlobVersionedHash: "0x013e0d8453800705d2addbb1e1b18a32e4f122c1796118e332c12b76ac94f981", }, { name: "single block, single tx, full blob random data -> data bigger compressed than uncompressed", numBlocks: 1, txData: []string{generateRandomData(maxAvailableBytesIncompressable - bytesPerBlock)}, - expectedBlobVersionedHash: "0x01f1aea1fe3f8a37ff505bf3aa5895d959c004087c4573bd99dcbfa035d5eb57", + expectedBlobVersionedHash: "0x0116f6c465152096ad21177c0a3f418342550e5c87a64636a900ac53d6737db8", }, { name: "2 blocks, single tx, full blob random data", numBlocks: 2, txData: []string{generateRandomData(maxAvailableBytesIncompressable/2 - bytesPerBlock*2)}, - expectedBlobVersionedHash: "0x01813145647585e490c7d14eab5aec876f2363954956e0b8d4658f211d5d1fbc", + 
expectedBlobVersionedHash: "0x01b1c7f234b9f42f09e950d60f9dbf6f5811f0a9abdb85a4a954e731a9ff56d7", }, { name: "single block, single tx, full blob repeat data", numBlocks: 1, txData: []string{repeat(0x12, maxAvailableBytesCompressable-bytesPerBlock)}, - expectedBlobVersionedHash: "0x01ac3403d7e4484fd5569c1042956cf2e5cadb03802603f4ce8ae890c4bc2414", + expectedBlobVersionedHash: "0x01ce5ed50a28906dd5f1556f6da913c24b6637a1d1aa6ff53d0abfb078e1ac44", }, { name: "2 blocks, single 2, full blob random data", numBlocks: 2, txData: []string{repeat(0x12, maxAvailableBytesCompressable/2-bytesPerBlock*2), repeat(0x13, maxAvailableBytesCompressable/2-bytesPerBlock*2)}, - expectedBlobVersionedHash: "0x01c31afe47f81de670e7e8263d1e8e01e452a3afc296528ebb447895d9572238", + expectedBlobVersionedHash: "0x01af3e8f72659c3e4bb6193fe8acc6548589f1a887a0a26ea56fdcae2ac62f81", }, { name: "single block, single tx, full blob random data -> error because 1 byte too big", @@ -577,31 +577,31 @@ func TestCodecV7BatchStandardTestCasesDisableCompression(t *testing.T) { { name: "no blocks", txData: []string{}, - expectedBlobVersionedHash: "0x0156a6430f1a7f819f41f4dfda7453c99693670447257f3f3b2f5a07beb47ae9", + expectedBlobVersionedHash: "0x01a821a71e2f0e7409d257c2b070cd4626825a6de5a2e3eda0099c21c8b16bd9", }, { name: "single block, single tx", numBlocks: 1, txData: []string{"0x010203"}, - expectedBlobVersionedHash: "0x011557bb7fdefb1a973d852d4f1c1ab46e46b5028a6f702821972d15a3a7bf36", + expectedBlobVersionedHash: "0x019ed4e5a68c7da4141a94887837d7a405285d2aaedf9701ad98fe7c27af48eb", }, { name: "single block, multiple tx", numBlocks: 1, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x010506ab63a9d8a3221df8c10fcc83f5fc9c072928b5bbe179386832ac422fa4", + expectedBlobVersionedHash: "0x01943ef319ee733ebbd63e5facf430aa02c0b7da1f3c9eb7e2cb98b8ff63aa04", }, { name: "multiple blocks, single tx per block", numBlocks: 3, txData: []string{"0x010203"}, - expectedBlobVersionedHash: 
"0x01e1c40d1f432836f394263e1f2a11c0704b2d3d94e99e48f589df45559b39c8", + expectedBlobVersionedHash: "0x013182a3b34bf4a390f8d74d35e922c4e116c45872da8b6f69661510d33736d8", }, { name: "multiple blocks, multiple tx per block", numBlocks: 3, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x01199ab5ee3c5c212843bffe27f07b0e85de1fc1f4e1fb8a7c4edbeb545397d6", + expectedBlobVersionedHash: "0x018077923a1617eae61bb6f296124f937656e9ab0852ce577e8b0f066207fe7e", }, { name: "thousands of blocks, multiple tx per block -> too big error", @@ -613,25 +613,25 @@ func TestCodecV7BatchStandardTestCasesDisableCompression(t *testing.T) { name: "single block, single tx, full blob random data", numBlocks: 1, txData: []string{generateRandomData(maxAvailableBytes - bytesPerBlock)}, - expectedBlobVersionedHash: "0x01f1aea1fe3f8a37ff505bf3aa5895d959c004087c4573bd99dcbfa035d5eb57", + expectedBlobVersionedHash: "0x0116f6c465152096ad21177c0a3f418342550e5c87a64636a900ac53d6737db8", }, { name: "2 blocks, single tx, full blob random data", numBlocks: 2, txData: []string{generateRandomData(maxAvailableBytes/2 - bytesPerBlock*2)}, - expectedBlobVersionedHash: "0x016a8c8e6a56f7a2895b3c5f75dd34a4b8248e0b47d60fca576fa60c571a5812", + expectedBlobVersionedHash: "0x0123aa955d8c0bbc0baca398d017b316dcb5a7716fe0517a3dee563512f67584", }, { name: "single block, single tx, full blob repeat data", numBlocks: 1, txData: []string{repeat(0x12, maxAvailableBytes-bytesPerBlock)}, - expectedBlobVersionedHash: "0x01ddad97c4d0eaa751c9e74d1a4a805da9434802ce61572ac0b5a87074230bc8", + expectedBlobVersionedHash: "0x019fff94371bb8986d294a036268f6121257cefa6b520f383e327e0dc5a02d9c", }, { name: "2 blocks, 2 tx, full blob random data", numBlocks: 2, txData: []string{repeat(0x12, maxAvailableBytes/4-bytesPerBlock*2), repeat(0x13, maxAvailableBytes/4-bytesPerBlock*2)}, - expectedBlobVersionedHash: "0x0126e942bc804b28f9f33c481ef6235e0affcda37be0e4281645067ed2577fe3", + expectedBlobVersionedHash: 
"0x01c9a49d50a70ad2aba13c199531fa40d43a909a65d9f19dd565be7259b415ed", }, { name: "single block, single tx, full blob random data -> error because 1 byte too big", @@ -702,7 +702,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { name: "Single Block 04", batch: &Batch{ InitialL1MessageIndex: 9, - LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_04.json")}, }, expectCompatible: true, @@ -711,7 +711,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { name: "Single Block 05, only L1 messages", batch: &Batch{ InitialL1MessageIndex: 36, - LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + LastL1MessageQueueHash: common.HexToHash("0xb3e7e7f02af64f130535f65b0e7375d4ad3c43c2f05ad3dbe7402ab000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_05.json")}, }, expectCompatible: true, @@ -736,7 +736,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { name: "Multiple Blocks 02, 03, 04, 05", batch: &Batch{ InitialL1MessageIndex: 9, - LastL1MessageQueueHash: common.HexToHash("0x97f93d31db48682539b6a399f76a8ef13b04d40cdd2b12d61177400000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 5430962..f38d765 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -68,7 +68,6 @@ const ( // Below is the encoding for DABlockV7, total 52 bytes. 
// * Field Bytes Type Index Comments -// * blockNumber 8 uint64 0 The height of this block. // * timestamp 8 uint64 0 The timestamp of this block. // * baseFee 32 uint256 8 The base fee of this block. // * gasLimit 8 uint64 40 The gas limit of this block. @@ -424,9 +423,9 @@ func decompressV7Bytes(compressedBytes []byte) ([]byte, error) { } func decodeSize3Bytes(data []byte) uint32 { - return uint32(data[0]) | uint32(data[1])<<8 | uint32(data[2])<<16 + return uint32(data[0])<<16 | uint32(data[1])<<8 | uint32(data[2]) } func encodeSize3Bytes(data uint32) []byte { - return []byte{byte(data), byte(data >> 8), byte(data >> 16)} + return []byte{byte(data >> 16), byte(data >> 8), byte(data)} } diff --git a/encoding/da.go b/encoding/da.go index 78df130..38b3339 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -141,7 +141,7 @@ func (b *Block) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 { return *lastQueueIndex - totalL1MessagePoppedBefore + 1 } -// NumL1MessagesNoSkipping returns the number of L1 messages in this block. +// NumL1MessagesNoSkipping returns the number of L1 messages and the highest queue index in this block. // This method assumes that L1 messages can't be skipped. func (b *Block) NumL1MessagesNoSkipping() (uint16, uint64, error) { var count uint16 @@ -780,7 +780,7 @@ func MessageQueueV2ApplyL1MessagesFromBlocks(initialQueueHash common.Hash, block To: txData.To, Value: txData.Value.ToInt(), Data: data, - // Sender: , TODO: is this needed? 
+ Sender: txData.From, } rollingHash = messageQueueV2ApplyL1Message(rollingHash, l1Message) @@ -807,8 +807,11 @@ func messageQueueV2ApplyL1Message(initialQueueHash common.Hash, message *types.L func messageQueueV2EncodeRollingHash(rollingHash common.Hash) common.Hash { // clear last 36 bits - rollingHash[26] &= 0xF0 - rollingHash[27] = 0 + + // Clear the lower 4 bits of byte 26 (preserving the upper 4 bits) + rollingHash[27] &= 0xF0 + + // Clear the next 4 bytes (32 bits total) rollingHash[28] = 0 rollingHash[29] = 0 rollingHash[30] = 0 diff --git a/encoding/da_test.go b/encoding/da_test.go index 5c5c150..0e6fbfe 100644 --- a/encoding/da_test.go +++ b/encoding/da_test.go @@ -6,11 +6,10 @@ import ( "os" "testing" - "github.com/stretchr/testify/assert" - "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/log" + "github.com/stretchr/testify/assert" "github.com/scroll-tech/da-codec/encoding/zstd" ) @@ -151,3 +150,75 @@ func readBlockFromJSON(t *testing.T, filename string) *Block { assert.NoError(t, json.Unmarshal(data, block)) return block } + +func TestMessageQueueV2EncodeRollingHash(t *testing.T) { + testCases := []struct { + name string + input common.Hash + expectedOutput common.Hash + }{ + { + "zero hash", + common.Hash{}, + common.Hash{}, + }, + { + "all bits set", + common.Hash{ + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + }, + common.Hash{ + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xF0, + 0x00, 0x00, 0x00, 0x00, + }, + }, + { + "random bytes", + common.Hash{ + 0x00, 0x11, 0x22, 0x33, + 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xAA, 0xBB, + 0xCC, 0xDD, 0xEE, 0xFF, + 0x00, 0x11, 0x22, 0x33, + 
0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xAA, 0xBB, + 0xCC, 0xDD, 0xEE, 0xFF, + }, + common.Hash{ + 0x00, 0x11, 0x22, 0x33, + 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xAA, 0xBB, + 0xCC, 0xDD, 0xEE, 0xFF, + 0x00, 0x11, 0x22, 0x33, + 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xAA, 0xB0, + 0x00, 0x00, 0x00, 0x00, + }, + }, + { + "random hash", + common.HexToHash("0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + common.HexToHash("0x1234567890abcdef1234567890abcdef1234567890abcdef1234567000000000"), + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + modified := messageQueueV2EncodeRollingHash(tc.input) + assert.Equal(t, tc.expectedOutput, modified) + }) + } +} From 392b6ff736d03d0bd35103a9482c5d5f1a026cbb Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 4 Feb 2025 18:19:27 +0800 Subject: [PATCH 22/47] update BlobEnvelopeV7 documentation --- encoding/codecv7_types.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index f38d765..e02a622 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -30,9 +30,9 @@ const ( // Below is the encoding format for BlobEnvelopeV7. 
// * Field Bytes Type Index Comments // * version 1 uint8 0 The version of the DA codec (batch/blob) -// * n_bytes[1] 1 uint8 1 Value denoting the number of bytes, n_bytes[1] +// * n_bytes[1] 1 uint8 1 Value denoting the number of bytes, n_bytes[1]*256^2 // * n_bytes[2] 1 uint8 2 Value denoting the number of bytes, n_bytes[2]*256 -// * n_bytes[3] 1 uint8 3 Value denoting the number of bytes, n_bytes[3]*256^2 +// * n_bytes[3] 1 uint8 3 Value denoting the number of bytes, n_bytes[3] // * flag 1 bool 4 1-byte flag to denote zstd-encoded/raw bytes // * payload N bytes 5 Possibly zstd-encoded payload bytes // * padding (4096*31 - (N+5)) bytes N+5 Padding to align to 4096*31 bytes From edaf5d2f9f24d4c7b9848491a24247b5d826381a Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Wed, 5 Feb 2025 09:18:41 +0800 Subject: [PATCH 23/47] add CodecV7 to general util functions --- encoding/da.go | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/encoding/da.go b/encoding/da.go index 38b3339..a4ddfa7 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -691,8 +691,10 @@ func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uin return "darwin" } else if !config.IsEuclid(blockTimestamp) { return "darwinV2" - } else { + } else if !config.IsEuclidV2(blockTimestamp) { return "euclid" + } else { + return "euclidV2" } } @@ -709,9 +711,11 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin return CodecV3 } else if !config.IsEuclid(blockTimestamp) { return CodecV4 - } else { + } else if !config.IsEuclidV2(blockTimestamp) { // V5 is skipped, because it is only used for the special Euclid transition batch that we handle explicitly return CodecV6 + } else { + return CodecV7 } } @@ -740,7 +744,7 @@ func GetChunkEnableCompression(codecVersion CodecVersion, chunk *Chunk) (bool, e return false, nil case CodecV2, CodecV3: return true, nil - case CodecV4, CodecV5, CodecV6: + 
case CodecV4, CodecV5, CodecV6, CodecV7: return CheckChunkCompressedDataCompatibility(chunk, codecVersion) default: return false, fmt.Errorf("unsupported codec version: %v", codecVersion) @@ -754,7 +758,7 @@ func GetBatchEnableCompression(codecVersion CodecVersion, batch *Batch) (bool, e return false, nil case CodecV2, CodecV3: return true, nil - case CodecV4, CodecV5, CodecV6: + case CodecV4, CodecV5, CodecV6, CodecV7: return CheckBatchCompressedDataCompatibility(batch, codecVersion) default: return false, fmt.Errorf("unsupported codec version: %v", codecVersion) From 894a93bb6b5318adbe7b02d55783d518ac350735 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Wed, 5 Feb 2025 12:32:14 +0800 Subject: [PATCH 24/47] add InitialL1MessageQueueHash and LastL1MessageQueueHash to encoding.Chunk --- encoding/da.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/encoding/da.go b/encoding/da.go index a4ddfa7..e9ba789 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -108,6 +108,10 @@ type Block struct { // Chunk represents a group of blocks. type Chunk struct { Blocks []*Block `json:"blocks"` + + // CodecV7. Used for chunk creation in relayer. + InitialL1MessageQueueHash common.Hash + LastL1MessageQueueHash common.Hash } // Batch represents a batch of chunks. 
From 2611ae171a8c82def68deace17bc0222bb0d6cd9 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 7 Feb 2025 08:29:25 +0800 Subject: [PATCH 25/47] go mod tidy --- go.sum | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go.sum b/go.sum index d191489..1c6e0fe 100644 --- a/go.sum +++ b/go.sum @@ -78,8 +78,8 @@ github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250129031936-44c72cd3fa47 h1:JbYYsGZMU1yAYsF/Ds6STMlItYy0SXj86nqdXC7nHyk= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250129031936-44c72cd3fa47/go.mod h1:8WbNuuUjie/LTdFXGGT7Z711MRW8Vv2zWLrcibg7hDc= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250205135740-4bdf6d096c38 h1:IKkevP42IQx8DQvtVq9WOmZDQrto59CGdEheXPf20HA= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250205135740-4bdf6d096c38/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= From 4d46aad77f4e4fce50cbc6a3ad71586dea39a7fc Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 7 Feb 2025 08:31:47 +0800 Subject: [PATCH 26/47] upgrade go-ethereum dependency to latest develop --- go.mod | 2 +- go.sum | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 6e96704..f9cae06 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( github.com/agiledragon/gomonkey/v2 v2.12.0 - 
github.com/scroll-tech/go-ethereum v1.10.14-0.20250205135740-4bdf6d096c38 + github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index 1c6e0fe..c605ba5 100644 --- a/go.sum +++ b/go.sum @@ -80,6 +80,8 @@ github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjR github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/scroll-tech/go-ethereum v1.10.14-0.20250205135740-4bdf6d096c38 h1:IKkevP42IQx8DQvtVq9WOmZDQrto59CGdEheXPf20HA= github.com/scroll-tech/go-ethereum v1.10.14-0.20250205135740-4bdf6d096c38/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f h1:WgIRuMWa7Q/xD1LHPEbQ9PpltasNiYR04qFzatiP/R0= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= From f4b274c7bc1ffcd27e17114e181d35845c27adbd Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Fri, 7 Feb 2025 17:01:56 +0800 Subject: [PATCH 27/47] implement estimate functions --- encoding/codecv7.go | 71 ++++++++++++++++++++++++++++++++++----------- 1 file changed, 54 insertions(+), 17 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 24f1e39..4f87f3d 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -261,41 +261,78 @@ func (d *DACodecV7) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error return compatible, nil } -// TODO: which of the Estimate* functions are needed? 
+func (d *DACodecV7) estimateL1CommitBatchSizeAndBlobSize(batch *Batch) (uint64, uint64, error) { + blobBytes := make([]byte, blobEnvelopeV7OffsetPayload) + + payloadBytes, err := d.constructBlobPayload(batch) + if err != nil { + return 0, 0, fmt.Errorf("failed to construct blob payload: %w", err) + } + + compressedPayloadBytes, enableCompression, err := d.checkCompressedDataCompatibility(payloadBytes) + if err != nil { + return 0, 0, fmt.Errorf("failed to check batch compressed data compatibility: %w", err) + } + + if enableCompression { + blobBytes = append(blobBytes, compressedPayloadBytes...) + } else { + blobBytes = append(blobBytes, payloadBytes...) + } + return blobEnvelopeV7OffsetPayload + uint64(len(payloadBytes)), calculatePaddedBlobSize(uint64(len(blobBytes))), nil +} + +// EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. func (d *DACodecV7) EstimateChunkL1CommitBatchSizeAndBlobSize(chunk *Chunk) (uint64, uint64, error) { - //TODO implement me after contracts are implemented - panic("implement me") + return d.estimateL1CommitBatchSizeAndBlobSize(&Batch{Blocks: chunk.Blocks}) } +// EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch. func (d *DACodecV7) EstimateBatchL1CommitBatchSizeAndBlobSize(batch *Batch) (uint64, uint64, error) { - //TODO implement me after contracts are implemented - panic("implement me") + return d.estimateL1CommitBatchSizeAndBlobSize(batch) } +// EstimateBlockL1CommitCalldataSize calculates the calldata size in l1 commit for this block approximately. +// Note: For CodecV7 calldata is constant independently of how many blocks or batches are submitted. 
func (d *DACodecV7) EstimateBlockL1CommitCalldataSize(block *Block) (uint64, error) { - //TODO implement me after contracts are implemented - panic("implement me") + return 0, nil } +// EstimateChunkL1CommitCalldataSize calculates the calldata size needed for committing a chunk to L1 approximately. +// Note: For CodecV7 calldata is constant independently of how many blocks or batches are submitted. There is no notion +// of chunks in this version. func (d *DACodecV7) EstimateChunkL1CommitCalldataSize(chunk *Chunk) (uint64, error) { - //TODO implement me after contracts are implemented - panic("implement me") + return 0, nil +} + +// EstimateBatchL1CommitCalldataSize calculates the calldata size in l1 commit for this batch approximately. +// Note: For CodecV7 calldata is constant independently of how many blocks or batches are submitted. +// Version + BatchHeader +func (d *DACodecV7) EstimateBatchL1CommitCalldataSize(batch *Batch) (uint64, error) { + return 1 + daBatchV7EncodedLength, nil } +// EstimateChunkL1CommitGas calculates the total L1 commit gas for this chunk approximately. +// Note: For CodecV7 calldata is constant independently of how many blocks or batches are submitted. There is no notion +// of chunks in this version. func (d *DACodecV7) EstimateChunkL1CommitGas(chunk *Chunk) (uint64, error) { - //TODO implement me after contracts are implemented - panic("implement me") + return 0, nil } +// EstimateBatchL1CommitGas calculates the total L1 commit gas for this batch approximately. 
func (d *DACodecV7) EstimateBatchL1CommitGas(batch *Batch) (uint64, error) { - //TODO implement me after contracts are implemented - panic("implement me") -} + // TODO: adjust this after contracts are implemented + var totalL1CommitGas uint64 -func (d *DACodecV7) EstimateBatchL1CommitCalldataSize(batch *Batch) (uint64, error) { - //TODO implement me after contracts are implemented - panic("implement me") + // Add extra gas costs + totalL1CommitGas += extraGasCost // constant to account for ops like _getAdmin, _implementation, _requireNotPaused, etc + totalL1CommitGas += 4 * coldSloadGas // 4 one-time cold sload for commitBatch + totalL1CommitGas += sstoreGas // 1 time sstore + totalL1CommitGas += baseTxGas // base gas for tx + totalL1CommitGas += calldataNonZeroByteGas // version in calldata + + return totalL1CommitGas, nil } // JSONFromBytes converts the bytes to a DABatch and then marshals it to JSON. From 3c106a262c45f1ca91af7c18a18dca3740760e6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A9ter=20Garamv=C3=B6lgyi?= Date: Fri, 7 Feb 2025 13:05:14 +0100 Subject: [PATCH 28/47] update TestMain and run go mod tidy --- encoding/da_test.go | 4 ++-- go.sum | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/encoding/da_test.go b/encoding/da_test.go index 0e6fbfe..08df23c 100644 --- a/encoding/da_test.go +++ b/encoding/da_test.go @@ -19,8 +19,8 @@ func TestMain(m *testing.M) { glogger.Verbosity(log.LvlInfo) log.Root().SetHandler(glogger) - m.Run() - os.Exit(0) + code := m.Run() + os.Exit(code) } func TestUtilFunctions(t *testing.T) { diff --git a/go.sum b/go.sum index c605ba5..e7cda67 100644 --- a/go.sum +++ b/go.sum @@ -78,8 +78,6 @@ github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod 
h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250205135740-4bdf6d096c38 h1:IKkevP42IQx8DQvtVq9WOmZDQrto59CGdEheXPf20HA= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250205135740-4bdf6d096c38/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8= github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f h1:WgIRuMWa7Q/xD1LHPEbQ9PpltasNiYR04qFzatiP/R0= github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= From 538036b20f23d6281f4c1f05a6f8668f1f6d1773 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Mon, 10 Feb 2025 07:32:06 +0800 Subject: [PATCH 29/47] add NewDAChunk to CodecV7 for easier use in relayer --- encoding/codecv7.go | 66 ++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 62 insertions(+), 4 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 4f87f3d..4b1f104 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -9,6 +9,7 @@ import ( "math" "github.com/scroll-tech/go-ethereum/common" + "github.com/scroll-tech/go-ethereum/core/types" "github.com/scroll-tech/go-ethereum/crypto/kzg4844" "github.com/scroll-tech/go-ethereum/log" @@ -60,10 +61,67 @@ func (d *DACodecV7) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. -// Note: For DACodecV7, this function is not implemented since there is no notion of DAChunk in this version. Blobs -// contain the entire batch data without any information of Chunks within. -func (d *DACodecV7) NewDAChunk(_ *Chunk, _ uint64) (DAChunk, error) { - return nil, nil +// Note: In DACodecV7 there is no notion of chunks. Blobs contain the entire batch data without any information of Chunks within. 
+// However, for compatibility reasons this function is implemented to create a DAChunk from a Chunk. +// This way we can still uniquely identify a set of blocks and their L1 messages. +func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) (DAChunk, error) { + if chunk == nil { + return nil, errors.New("chunk is nil") + } + + if len(chunk.Blocks) == 0 { + return nil, errors.New("number of blocks is 0") + } + + if len(chunk.Blocks) > math.MaxUint8 { + return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(chunk.Blocks), math.MaxUint8) + } + + initialL2BlockNumber := chunk.Blocks[0].Header.Number.Uint64() + l1MessageIndex := totalL1MessagePoppedBefore + + blocks := make([]DABlock, 0, len(chunk.Blocks)) + txs := make([][]*types.TransactionData, 0, len(chunk.Blocks)) + + for i, block := range chunk.Blocks { + // sanity check: block numbers are contiguous + if block.Header.Number.Uint64() != initialL2BlockNumber+uint64(i) { + return nil, fmt.Errorf("invalid block number: expected %d but got %d", initialL2BlockNumber+uint64(i), block.Header.Number.Uint64()) + } + + // sanity check (within NumL1MessagesNoSkipping): L1 message indices are contiguous within a block + numL1Messages, highestQueueIndex, err := block.NumL1MessagesNoSkipping() + if err != nil { + return nil, fmt.Errorf("failed to get numL1Messages: %w", err) + } + // sanity check: L1 message indices are contiguous across blocks boundaries + if numL1Messages > 0 { + if l1MessageIndex+uint64(numL1Messages) != highestQueueIndex { + return nil, fmt.Errorf("failed to sanity check L1 messages count: l1MessageIndex + numL1Messages != highestQueueIndex: %d + %d != %d", l1MessageIndex, numL1Messages, highestQueueIndex) + } + l1MessageIndex = highestQueueIndex + } + + daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), numL1Messages) + blocks = append(blocks, daBlock) + txs 
= append(txs, block.Transactions) + } + + daChunk := newDAChunkV1( + blocks, // blocks + txs, // transactions + ) + + // sanity check: initialL1MessageQueueHash+apply(L1Messages) = lastL1MessageQueueHash + computedLastL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(chunk.InitialL1MessageQueueHash, chunk.Blocks) + if err != nil { + return nil, fmt.Errorf("failed to apply L1 messages to initialL1MessageQueueHash: %w", err) + } + if computedLastL1MessageQueueHash != chunk.LastL1MessageQueueHash { + return nil, fmt.Errorf("failed to sanity check lastL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedLastL1MessageQueueHash, chunk.LastL1MessageQueueHash) + } + + return daChunk, nil } // NewDABatch creates a DABatch including blob from the provided Batch. From cfb316bd5d65b941a840bf836497b02bc8335753 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Mon, 10 Feb 2025 07:45:23 +0800 Subject: [PATCH 30/47] add daChunkV7 type to calculate chunk hash --- encoding/codecv7.go | 2 +- encoding/codecv7_types.go | 47 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 4b1f104..34f19c5 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -107,7 +107,7 @@ func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs = append(txs, block.Transactions) } - daChunk := newDAChunkV1( + daChunk := newDAChunkV7( blocks, // blocks txs, // transactions ) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index e02a622..5a3750e 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -399,6 +399,53 @@ func (b *daBlockV7) Decode(data []byte) error { return nil } +// daChunkV7 groups consecutive DABlocks with their transactions. +// Note: In DACodecV7 there is no notion of chunks. 
Blobs contain the entire batch data without any information of Chunks within. +// However, for compatibility reasons DAChunks are still used in the codebase. +// This way we can still uniquely identify a set of blocks and their L1 messages via their hash. +type daChunkV7 struct { + daChunkV1 +} + +// newDAChunkV1 is a constructor for daChunkV1, initializing with blocks and transactions. +func newDAChunkV7(blocks []DABlock, transactions [][]*types.TransactionData) *daChunkV7 { + return &daChunkV7{ + daChunkV1{ + blocks: blocks, + transactions: transactions, + }, + } +} + +// Hash computes the hash of the DAChunk data. +func (c *daChunkV7) Hash() (common.Hash, error) { + var dataBytes []byte + + // concatenate block contexts + for _, block := range c.blocks { + encodedBlock := block.Encode() + dataBytes = append(dataBytes, encodedBlock...) + } + + // concatenate l1 tx hashes + for _, blockTxs := range c.transactions { + for _, txData := range blockTxs { + if txData.Type != types.L1MessageTxType { + continue + } + + hashBytes := common.FromHex(txData.TxHash) + if len(hashBytes) != common.HashLength { + return common.Hash{}, fmt.Errorf("unexpected hash: %s", txData.TxHash) + } + dataBytes = append(dataBytes, hashBytes...) + } + } + + hash := crypto.Keccak256Hash(dataBytes) + return hash, nil +} + // decompressV7Bytes decompresses the given blob bytes into the original payload bytes. 
func decompressV7Bytes(compressedBytes []byte) ([]byte, error) { var res []byte From c6ae41e8300281956bc7a86b6f239a9c98df34a4 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Mon, 10 Feb 2025 08:18:55 +0800 Subject: [PATCH 31/47] allow batch.chunks but check consistency with batch.blocks --- encoding/codecv7.go | 42 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 34f19c5..0ebc05e 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -126,14 +126,30 @@ func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) // NewDABatch creates a DABatch including blob from the provided Batch. func (d *DACodecV7) NewDABatch(batch *Batch) (DABatch, error) { - if len(batch.Chunks) != 0 { - return nil, errors.New("batch must not contain any chunks") - } - if len(batch.Blocks) == 0 { return nil, errors.New("batch must contain at least one block") } + // If the batch contains chunks, we need to ensure that the blocks in the chunks match the blocks in the batch. + // Chunks are not directly used in DACodecV7, but we still need to check the consistency of the blocks. + // This is done to ensure compatibility with older versions and the relayer implementation. 
+ if len(batch.Chunks) != 0 { + totalBlocks := len(batch.Blocks) + chunkBlocksCount := 0 + for _, chunk := range batch.Chunks { + for _, block := range chunk.Blocks { + if chunkBlocksCount > totalBlocks { + return nil, errors.New("chunks contain more blocks than the batch") + } + + if batch.Blocks[chunkBlocksCount].Header.Hash() != block.Header.Hash() { + return nil, errors.New("blocks in chunks do not match the blocks in the batch") + } + chunkBlocksCount++ + } + } + } + blob, blobVersionedHash, blobBytes, err := d.constructBlob(batch) if err != nil { return nil, fmt.Errorf("failed to construct blob: %w", err) @@ -298,8 +314,24 @@ func (d *DACodecV7) CheckChunkCompressedDataCompatibility(_ *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. func (d *DACodecV7) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { + // If the batch contains chunks, we need to ensure that the blocks in the chunks match the blocks in the batch. + // Chunks are not directly used in DACodecV7, but we still need to check the consistency of the blocks. + // This is done to ensure compatibility with older versions and the relayer implementation. 
if len(b.Chunks) != 0 { - return false, errors.New("batch must not contain any chunks") + totalBlocks := len(b.Blocks) + chunkBlocksCount := 0 + for _, chunk := range b.Chunks { + for _, block := range chunk.Blocks { + if chunkBlocksCount > totalBlocks { + return false, errors.New("chunks contain more blocks than the batch") + } + + if b.Blocks[chunkBlocksCount].Header.Hash() != block.Header.Hash() { + return false, errors.New("blocks in chunks do not match the blocks in the batch") + } + chunkBlocksCount++ + } + } } if len(b.Blocks) == 0 { From d028c537b995acbdf9f3d6db84d138268f9691e6 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Mon, 10 Feb 2025 12:19:51 +0800 Subject: [PATCH 32/47] fix off-by-one error with L1 messages --- encoding/codecv7.go | 13 +++++++++---- encoding/codecv7_test.go | 34 +++++++++++++++++----------------- encoding/codecv7_types.go | 6 +++--- encoding/da.go | 1 + 4 files changed, 30 insertions(+), 24 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 0ebc05e..b07c62a 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -96,10 +96,10 @@ func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } // sanity check: L1 message indices are contiguous across blocks boundaries if numL1Messages > 0 { - if l1MessageIndex+uint64(numL1Messages) != highestQueueIndex { - return nil, fmt.Errorf("failed to sanity check L1 messages count: l1MessageIndex + numL1Messages != highestQueueIndex: %d + %d != %d", l1MessageIndex, numL1Messages, highestQueueIndex) + if l1MessageIndex+uint64(numL1Messages) != highestQueueIndex+1 { + return nil, fmt.Errorf("failed to sanity check L1 messages count after block %d: l1MessageIndex + numL1Messages != highestQueueIndex+1: %d + %d != %d", block.Header.Number.Uint64(), l1MessageIndex, numL1Messages, highestQueueIndex+1) } - l1MessageIndex = highestQueueIndex + l1MessageIndex += uint64(numL1Messages) } daBlock := 
newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), numL1Messages) @@ -375,7 +375,12 @@ func (d *DACodecV7) estimateL1CommitBatchSizeAndBlobSize(batch *Batch) (uint64, // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. func (d *DACodecV7) EstimateChunkL1CommitBatchSizeAndBlobSize(chunk *Chunk) (uint64, uint64, error) { - return d.estimateL1CommitBatchSizeAndBlobSize(&Batch{Blocks: chunk.Blocks}) + return d.estimateL1CommitBatchSizeAndBlobSize(&Batch{ + Blocks: chunk.Blocks, + InitialL1MessageIndex: chunk.InitialL1MessageIndex, + InitialL1MessageQueueHash: chunk.InitialL1MessageQueueHash, + LastL1MessageQueueHash: chunk.LastL1MessageQueueHash, + }) } // EstimateBatchL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a batch. diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index 0ede909..65a09a9 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -139,7 +139,7 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { { name: "Batch with 3 blocks, blocktrace 02, 03, 04", batch: &Batch{ - InitialL1MessageIndex: 9, + InitialL1MessageIndex: 10, LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), @@ -147,8 +147,8 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4), }, }, - expectedEncode: "070000000000000000012979a80d8cadee682dedb27d9b4a78d2c71c032ea875f853892e9c5c42d0e90000000000000000000000000000000000000000000000000000000000000000", - expectedHash: "0x7e4ce5fa62b86def764d9cc03d197bc945b54dc9c814fc0b011293036cfd7848", + expectedEncode: 
"07000000000000000001a81a775d8475e6ad5ae8dfe907d0e8755e968549bce449d1521ac2f1a1ca670000000000000000000000000000000000000000000000000000000000000000", + expectedHash: "0x5b6fe96b62980feb492c511a0fe232cbfdf15a2f7b927142a54680e76cee8ee2", }, } @@ -238,39 +238,39 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 9, + InitialL1MessageIndex: 10, InitialL1MessageQueueHash: common.Hash{}, LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: []*Block{replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobEncode: "00070000650120a6050300f40400000900c7436aaec2cfaf39d5be02a02c6ac200089ab264c3e0fd142db682f1c000040001646b6e137a120000020001df0b8000825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808006000039066e16790923b039d0f80258", - expectedBlobVersionedHash: "0x01b7709103b123307086d3b4db44cde8d96d1a94dcf80e195461330c65939872", + expectedBlobEncode: "00070000650120a6050300f40400000a00c7436aaec2cfaf39d5be02a02c6ac200089ab264c3e0fd142db682f1c000040001646b6e137a120000020001df0b8000825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808006000039066e16790923b039d0f80258", + expectedBlobVersionedHash: "0x01170b69948c60bc987f26402b33a5fc15f10275c830dfb0fafcdae811d18f00", }, { name: "Batch with 3 blocks, blocktrace 02 + 03 + 04", batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 9, + InitialL1MessageIndex: 10, InitialL1MessageQueueHash: common.Hash{}, LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobEncode: 
"0007000c6801602517156300b49600000900c7436aaec2cfaf39d5be02a02c6a00c2089ab264c3e0fd142db682f1c00002000363807b2a1de9000355418d1e810084000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8007180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0c00a28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf678100e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d0000c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a0003fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a08110378151602083015160408085018000519151939592948301929184648211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027
106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9
c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e100058080808080814ba8d130a9149a111111110549d2741105c418e61894eb01001240132dcb629c42c818e2c888502022230a0a92a4660ef030c8fc0ddb85e200d215e23516285d6a71d2c1a2a351201ca40faeab44851c1fbf00022ce7407800cf901cb445e0306b08cd4a2ae0724e1a69fa2f7aaa8fd851465eda370fade700ee1a0754a65b8078358317f2b9a460eadb2eb338ac8411a449057b478e3c0a008e0987293e5ce118ae05ccbd6837b82de87a617154940bcebb0b88ffe7152700a9f199051c3311ca647ec728aa797d3ae1518f6aa4e348024239b0a5cb78ba00987e426d486756ee2460452ecaa3d1144d5f81412b92e003774763efe158ac004b52b7a96203be266a9b0232cb47ed216a773ff21a241bbabfc22080979fc200aded1bd0615426425652e36f784c92d96db151ec85cb10329135878563adb60099708967a33656729bf44924e051899c3ab3777f03148f5792a231d948a9de0007c1a68a51ba08e133d2c4db0577f63870f2430af1828b47113227da2e0d100032b92a06a32098f02854be1a42a786eec2e9fb35a97738caf6dd1d57188d3f007d29afe7f90ed912ae39132ffcb9741b8010d4f0f3292f811d01f34eab298800a7589f2030d5ea72f11ea3aa1327a64c4de1727122a0958b27aa7025bbaace0018739ab139fa2c36ec0f45a50f55f369672e65d092da4
7c48e56db72808bc1006bdb3cf8163c31b92c81d7e15f7ab6ae1b7740b28f67947924ce24fef45eb30017491d54e8e28719eee3946ad529583de2cb11ac09c8a704ec7335f5280e2800e97cc2e7cf7bb9245b1ae02c345dcb73998be05998b0def5f91c591330e65600b1c8bbc266faca3360d72a5d4a6edefc8c3854452460ba4a034b808c385fa800c7967a86a91e7af51660b410b97d40afa4fec3d49e522a995aa5ae6453663c00d46b84fc4ff1520634609db2201a6434008d91f0f1c73e8aa5e9f34056154b0070cd526d386d82fd155bd669540674f0e65aa05d301e9174d2e104a603eac600d1cb417f39838c4716b079e06ca3321aa7336319a40edc4a4cdfdb767a702d0012d526c29611c8d2c10817e39f4bc29d180ce6", - expectedBlobVersionedHash: "0x012979a80d8cadee682dedb27d9b4a78d2c71c032ea875f853892e9c5c42d0e9", + expectedBlobEncode: "0007000c6801602517156300b49600000a00c7436aaec2cfaf39d5be02a02c6a00c2089ab264c3e0fd142db682f1c00002000363807b2a1de9000355418d1e810084000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8007180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0c00a28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf678100e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d0000c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a0003fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a08110378151602083015160408085018000519151939592948301929184648211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b031916
33021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660
041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e100058080808080814ba8d130a9149a111111110549d2741105c418e61894eb01001240132dcb629c42c818e2c888502022230a0a92a4660ef030c8fc0ddb85e200d215e23516285d6a71d2c1a2a351201ca40faeab44851c1fbf00022ce7407800cf901cb445e0306b08cd4a2ae0724e1a69fa2f7aaa8fd851465eda370fade700ee1a0754a65b8078358317f2b9a460eadb2eb338ac8411a449057b47
8e3c0a008e0987293e5ce118ae05ccbd6837b82de87a617154940bcebb0b88ffe7152700a9f199051c3311ca647ec728aa797d3ae1518f6aa4e348024239b0a5cb78ba00987e426d486756ee2460452ecaa3d1144d5f81412b92e003774763efe158ac004b52b7a96203be266a9b0232cb47ed216a773ff21a241bbabfc22080979fc200aded1bd0615426425652e36f784c92d96db151ec85cb10329135878563adb60099708967a33656729bf44924e051899c3ab3777f03148f5792a231d948a9de0007c1a68a51ba08e133d2c4db0577f63870f2430af1828b47113227da2e0d100032b92a06a32098f02854be1a42a786eec2e9fb35a97738caf6dd1d57188d3f007d29afe7f90ed912ae39132ffcb9741b8010d4f0f3292f811d01f34eab298800a7589f2030d5ea72f11ea3aa1327a64c4de1727122a0958b27aa7025bbaace0018739ab139fa2c36ec0f45a50f55f369672e65d092da47c48e56db72808bc1006bdb3cf8163c31b92c81d7e15f7ab6ae1b7740b28f67947924ce24fef45eb30017491d54e8e28719eee3946ad529583de2cb11ac09c8a704ec7335f5280e2800e97cc2e7cf7bb9245b1ae02c345dcb73998be05998b0def5f91c591330e65600b1c8bbc266faca3360d72a5d4a6edefc8c3854452460ba4a034b808c385fa800c7967a86a91e7af51660b410b97d40afa4fec3d49e522a995aa5ae6453663c00d46b84fc4ff1520634609db2201a6434008d91f0f1c73e8aa5e9f34056154b0070cd526d386d82fd155bd669540674f0e65aa05d301e9174d2e104a603eac600d1cb417f39838c4716b079e06ca3321aa7336319a40edc4a4cdfdb767a702d0012d526c29611c8d2c10817e39f4bc29d180ce6", + expectedBlobVersionedHash: "0x01a81a775d8475e6ad5ae8dfe907d0e8755e968549bce449d1521ac2f1a1ca67", }, { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03", batch: &Batch{ Index: 3, ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 36, + InitialL1MessageIndex: 37, InitialL1MessageQueueHash: common.Hash{}, LastL1MessageQueueHash: common.HexToHash("0xb3e7e7f02af64f130535f65b0e7375d4ad3c43c2f05ad3dbe7402ab000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, }, - expectedBlobEncode: 
"0007000c4f016005174d6200049500002400b3e7e7f02af64f130535f65b0e730075d4ad3c43c2f05ad3dbe7402ab00002000363807b2a1de9000355418d1e8100840002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e50001000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf6800e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace2008d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4ab00a684835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380833981810160405260a0811037815160208301516040808500018051915193959294830192918464018211639083019060208201858179820051811182820188101794825250918201929091019080838360005b83c357810081015183820152602001620000a9565b50505050905090810190601f16f1570080820380516001836020036101000a031916819150805160405193929190010015012b01460175015b01a39081015185519093508592508491620001c891600003918501906200026b565b508051620001de9060049060208450600580546100ff001960ff1990911660121716905550600680546001600160a01b0380881600199283161790925560078054928716929091169190911790556200023081620000025562010000600160b01b03191633021790555062000307915050565b6000ff191660ff929092565b828160011615610100020316600290049060005260002060002090601f016020900481019282601f10620002ae578051838001178500de0160010185558215620002de579182015b8202de5782518255916020019100906001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010001620002f1565b61119b80620003176000396000f3fe61001004361061010b005760003560e01c80635c975abb116100a257806395d89b411161007114610300015780639dc29fac14610309578063a457c2d714610335578063a9059cbb1400610361578063dd62ed3e1461038d5761010b565b1461029d57806370a0823100146102a55780638456cb59146102cb5780638e50817a146102d3313ce56711006100de571461021d578063395093511461023b5780633f4ba83a146102675700806340c10f191
461027106fdde0314610110578063095ea7b31461018d5780006318160ddd146101cd57806323b872e7575b6101186103bb565b6040805160002080825283518183015283519192839290830161015261013a61017f9250500080910390f35b6101b9600480360360408110156101a381351690602001356100045191151582525190819003602001d561046e60fd81169160208101359091001690604074565b6102256104fb60ff90921640025105046f610552565b005b0061026f028705a956610654d520bb3516610662067d56e90135166106d21861000757031f07b856034b085f77c7d5a308db565b6003805420601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c57610100008083540402835291610447565b825b8154815260200180831161042a5782009003601f16820191565b600061046561045e610906565b848461090a565b500060019202548184f6565b6104f18461048d6104ec8560405180606080602861001085602891398a166000908152600160205260408120906104cb81019190910052604001600020549190610b51565b935460ff160511016000610522908116008252602080830193909352604091820120918c168152925290205490610be800565b600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd0800185b1b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010000900460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600006064606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046006163746f727960a0079283918216179091559390921660041561080808550e0065086c2511176025006108968dd49182408083209390941682523383166109004f57040180806020018281038252602401806110f36024913960400191fd820016610994223d60228084166000819487168084529482529182902085905581005185815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e005b200ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff8602300610a8b838383610f61565b610ac881265f60268685808220939093559084160081522054610af7908220409490945580905191937fddf252ad1be2c89b69c200b068fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484001115610be08381815191508051900ba50b8d0bd2fd900300828201610c421b007f536166654d6174683a206164646974696f6e206f766572666c6f7700610c009c1473621690557f5db9ee0a495bf
2e6ff9c91a7834c1ba4fdd244a5e8aa4e00537bd38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e740020746f20746865207a65726f72657373610d546000600254610d61025590200054610d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e00420270b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad600021610eb68260000ef3221b85839020550f199082610fb540805182600091850016919120565b610f6cb07415610fb02a113c602a00610c428383401e7375620074726163815250fe7472616e736665726275726e20616d6f756e742065786300656564732062616c616e6365617070726f7665616c6c6f7766726f6d646563007265617365642062656c6f775061757361626c653a20746f6b656e7768696c006520706175736564a2646970667358221220e96342bec8f6c2bf72815a3999008973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00330000001c5a77d9fa7ef466951b2f01f724bca3a5820b63a0e01209574554482063006f696e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e001737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc6230095bda52095d44b8a9af7814ba8c130a9143223222222122449930e1105c41800e61894eb0112401329cb622042c818e2c888502022330a0a92a4660ef030c800d41b020bd54b2b52d740a07429c5650a708c469770741f38578d8a1c181f160002fcce819dced01cd046489a753bcd9a5460c71c9ed2a85af4562fbba326bf00b460ee14ce7d8f435fa60f80784f8217f378a760e2db08177bbf8461a44971005b468e2a0a4e0987d33d5cf718d505447668371e2db86dd9425494169c23170010ffcb2b4e52e320a370cc6c28fbf4d1a0a8dbfa28c5a316d548c791841fe50040919ef57441fd956d4883596d9d80325c644c9b29277de58e5624800fd81d00edbe8743b12ea14599aa08f8b0a825cace2c1db5bfa87dfdc8a3916c7e25800041282fbf875bcf37b8c348265c565259bf213a49368cc5a0b617de4348dfac001d16fab58e66929018206a034fce4b9f08023628916c86d48a23408b99c73e001a87f64a4d9d1346aa28a58b089f91f2bcd1ba73dc693b18168865563c3291003965bbd440c8b455498c62cc84d342c5d4102035b72e6cdf5ab378c7e624de00dd778ff8f15b8cedf5c9dfc15bc2b59cc912fe5c1a0d40626a18f8e84ba04200c05c72c688c5cbaccf10d84545b9781ba3dd893ca6604de1717123a0158ae700aa7079bb74d102f3ecb18df95908ec8f924a32aa4cdeceeccc603ada6f8821005a68cb001713afcdf0e02d78a0e410040e097fd1b31d6
cdc51caae3eefe6ee003993c2a7af90bde0d4c14f17cfcc70e64e29a99d52ee51fc8b4604f43d41a600eaab794791e14ff27b3fc7de5bfa69e9deb3cd7477e9c22e1acde884264d2500e7089aa03137827df80af1e9d2cfb08c8dbb14a2f9c153cc5036495074f55b0096a019f15da8c7957a46a9de77f51860b425b91d40af7cff31df5e5d2aea5b00a5f065b3677cd66bc4a64f115d6e39e0f0b2a01b943280a2919cf06339caba00e944405dd582e0da526dbedf64c5ab86bb80a998beb1bc5a48dd793cba742000e104490784c7b9b7833eee8418b88ccaf441e4370b38239db665ec3b5055990012c1b67be8dd1ae1c0b61e0a64bca0b78bc73730ef4b0d065e", - expectedBlobVersionedHash: "0x01542d50fc3164320026dbcccc104931c29c363c30874701d1b942fa8aa90795", + expectedBlobEncode: "0007000c4f016005174d6200049500002500b3e7e7f02af64f130535f65b0e730075d4ad3c43c2f05ad3dbe7402ab00002000363807b2a1de9000355418d1e8100840002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e50001000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf6800e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace2008d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4ab00a684835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380833981810160405260a0811037815160208301516040808500018051915193959294830192918464018211639083019060208201858179820051811182820188101794825250918201929091019080838360005b83c357810081015183820152602001620000a9565b50505050905090810190601f16f1570080820380516001836020036101000a031916819150805160405193929190010015012b01460175015b01a39081015185519093508592508491620001c891600003918501906200026b565b508051620001de9060049060208450600580546100ff001960ff1990911660121716905550600680546001600160a01b0380881600199283161790925560078054928716929091169190911790556200023081620000025562010000600160b01b03191633021790555062000307915050565b6000ff191660ff92
9092565b828160011615610100020316600290049060005260002060002090601f016020900481019282601f10620002ae578051838001178500de0160010185558215620002de579182015b8202de5782518255916020019100906001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010001620002f1565b61119b80620003176000396000f3fe61001004361061010b005760003560e01c80635c975abb116100a257806395d89b411161007114610300015780639dc29fac14610309578063a457c2d714610335578063a9059cbb1400610361578063dd62ed3e1461038d5761010b565b1461029d57806370a0823100146102a55780638456cb59146102cb5780638e50817a146102d3313ce56711006100de571461021d578063395093511461023b5780633f4ba83a146102675700806340c10f191461027106fdde0314610110578063095ea7b31461018d5780006318160ddd146101cd57806323b872e7575b6101186103bb565b6040805160002080825283518183015283519192839290830161015261013a61017f9250500080910390f35b6101b9600480360360408110156101a381351690602001356100045191151582525190819003602001d561046e60fd81169160208101359091001690604074565b6102256104fb60ff90921640025105046f610552565b005b0061026f028705a956610654d520bb3516610662067d56e90135166106d21861000757031f07b856034b085f77c7d5a308db565b6003805420601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c57610100008083540402835291610447565b825b8154815260200180831161042a5782009003601f16820191565b600061046561045e610906565b848461090a565b500060019202548184f6565b6104f18461048d6104ec8560405180606080602861001085602891398a166000908152600160205260408120906104cb81019190910052604001600020549190610b51565b935460ff160511016000610522908116008252602080830193909352604091820120918c168152925290205490610be800565b600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd0800185b1b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010000900460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600006064606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046006163746f727960a0079283918216179091559390921660041561080808550e0065086c2511176025006108968dd4
9182408083209390941682523383166109004f57040180806020018281038252602401806110f36024913960400191fd820016610994223d60228084166000819487168084529482529182902085905581005185815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e005b200ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff8602300610a8b838383610f61565b610ac881265f60268685808220939093559084160081522054610af7908220409490945580905191937fddf252ad1be2c89b69c200b068fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484001115610be08381815191508051900ba50b8d0bd2fd900300828201610c421b007f536166654d6174683a206164646974696f6e206f766572666c6f7700610c009c1473621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e00537bd38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e740020746f20746865207a65726f72657373610d546000600254610d61025590200054610d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e00420270b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad600021610eb68260000ef3221b85839020550f199082610fb540805182600091850016919120565b610f6cb07415610fb02a113c602a00610c428383401e7375620074726163815250fe7472616e736665726275726e20616d6f756e742065786300656564732062616c616e6365617070726f7665616c6c6f7766726f6d646563007265617365642062656c6f775061757361626c653a20746f6b656e7768696c006520706175736564a2646970667358221220e96342bec8f6c2bf72815a3999008973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00330000001c5a77d9fa7ef466951b2f01f724bca3a5820b63a0e01209574554482063006f696e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e001737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc6230095bda52095d44b8a9af7814ba8c130a9143223222222122449930e1105c41800e61894eb0112401329cb622042c818e2c888502022330a0a92a4660ef030c800d41b020bd54b2b52d740a07429c5650a708c469770741f38578d8a1c181f160002fcce819dced01cd046489a753bcd9a5460c71c9ed2a85af4562fbba326bf00b460ee14ce7d8f435fa60f80784f8217f378a760e2db08177bbf8461a44971005b468e2a0a4e0987d33d5cf718d505447668371e2db86dd9425494169c23170010ffcb2b4e52e320a370cc6c28fbf4d1a0a8dbfa28c5
a316d548c791841fe50040919ef57441fd956d4883596d9d80325c644c9b29277de58e5624800fd81d00edbe8743b12ea14599aa08f8b0a825cace2c1db5bfa87dfdc8a3916c7e25800041282fbf875bcf37b8c348265c565259bf213a49368cc5a0b617de4348dfac001d16fab58e66929018206a034fce4b9f08023628916c86d48a23408b99c73e001a87f64a4d9d1346aa28a58b089f91f2bcd1ba73dc693b18168865563c3291003965bbd440c8b455498c62cc84d342c5d4102035b72e6cdf5ab378c7e624de00dd778ff8f15b8cedf5c9dfc15bc2b59cc912fe5c1a0d40626a18f8e84ba04200c05c72c688c5cbaccf10d84545b9781ba3dd893ca6604de1717123a0158ae700aa7079bb74d102f3ecb18df95908ec8f924a32aa4cdeceeccc603ada6f8821005a68cb001713afcdf0e02d78a0e410040e097fd1b31d6cdc51caae3eefe6ee003993c2a7af90bde0d4c14f17cfcc70e64e29a99d52ee51fc8b4604f43d41a600eaab794791e14ff27b3fc7de5bfa69e9deb3cd7477e9c22e1acde884264d2500e7089aa03137827df80af1e9d2cfb08c8dbb14a2f9c153cc5036495074f55b0096a019f15da8c7957a46a9de77f51860b425b91d40af7cff31df5e5d2aea5b00a5f065b3677cd66bc4a64f115d6e39e0f0b2a01b943280a2919cf06339caba00e944405dd582e0da526dbedf64c5ab86bb80a998beb1bc5a48dd793cba742000e104490784c7b9b7833eee8418b88ccaf441e4370b38239db665ec3b5055990012c1b67be8dd1ae1c0b61e0a64bca0b78bc73730ef4b0d065e", + expectedBlobVersionedHash: "0x01aaa15d8da84ac255b2c5912b9b2565a8888a11f174b7038375824895416c1e", }, // test error cases { @@ -302,7 +302,7 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { batch: &Batch{ Index: 3, ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 36, + InitialL1MessageIndex: 37, InitialL1MessageQueueHash: common.Hash{1}, LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, @@ -701,7 +701,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { { name: "Single Block 04", batch: &Batch{ - 
InitialL1MessageIndex: 9, + InitialL1MessageIndex: 10, LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_04.json")}, }, @@ -710,7 +710,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { { name: "Single Block 05, only L1 messages", batch: &Batch{ - InitialL1MessageIndex: 36, + InitialL1MessageIndex: 37, LastL1MessageQueueHash: common.HexToHash("0xb3e7e7f02af64f130535f65b0e7375d4ad3c43c2f05ad3dbe7402ab000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_05.json")}, }, @@ -733,9 +733,9 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { creationErr: "unexpected queue index", }, { - name: "Multiple Blocks 02, 03, 04, 05", + name: "Multiple Blocks 02, 03, 04", batch: &Batch{ - InitialL1MessageIndex: 9, + InitialL1MessageIndex: 10, LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 5a3750e..d4c870d 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -247,10 +247,10 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { } // sanity check: L1 message indices are contiguous across blocks boundaries if numL1Messages > 0 { - if l1MessageIndex+uint64(numL1Messages) != highestQueueIndex { - return nil, fmt.Errorf("failed to sanity check L1 messages count: l1MessageIndex + numL1Messages != highestQueueIndex: %d + %d != %d", l1MessageIndex, numL1Messages, highestQueueIndex) + if l1MessageIndex+uint64(numL1Messages) != highestQueueIndex+1 { + return nil, fmt.Errorf("failed to sanity check L1 messages count after block %d: l1MessageIndex + numL1Messages != highestQueueIndex+1: %d + %d != %d", block.Header.Number.Uint64(), l1MessageIndex, numL1Messages, highestQueueIndex+1) } - 
l1MessageIndex = highestQueueIndex + l1MessageIndex += uint64(numL1Messages) } daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), numL1Messages) diff --git a/encoding/da.go b/encoding/da.go index e9ba789..4f2b07e 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -110,6 +110,7 @@ type Chunk struct { Blocks []*Block `json:"blocks"` // CodecV7. Used for chunk creation in relayer. + InitialL1MessageIndex uint64 InitialL1MessageQueueHash common.Hash LastL1MessageQueueHash common.Hash } From 8fa5e276e4ebd7c88d4830bd7ab01a7df6cff3f2 Mon Sep 17 00:00:00 2001 From: Rohit Narurkar Date: Fri, 14 Feb 2025 09:28:31 +0000 Subject: [PATCH 33/47] Fix: rolling hash implementation (#42) * fix: clear 32 bits instead of 36 * fix: test expectations for rolling hash * fix: tests * fix tests --------- Co-authored-by: jonastheis <4181434+jonastheis@users.noreply.github.com> --- encoding/codecv7_test.go | 30 +++++++++++++++--------------- encoding/da.go | 7 +------ encoding/da_test.go | 6 +++--- 3 files changed, 19 insertions(+), 24 deletions(-) diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index 65a09a9..8c7e0ed 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -140,15 +140,15 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { name: "Batch with 3 blocks, blocktrace 02, 03, 04", batch: &Batch{ InitialL1MessageIndex: 10, - LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4), }, }, - expectedEncode: 
"07000000000000000001a81a775d8475e6ad5ae8dfe907d0e8755e968549bce449d1521ac2f1a1ca670000000000000000000000000000000000000000000000000000000000000000", - expectedHash: "0x5b6fe96b62980feb492c511a0fe232cbfdf15a2f7b927142a54680e76cee8ee2", + expectedEncode: "07000000000000000001f6f07ae03e8a6ead4384c206ac3d38cd453c1da0516dad7608713bd35bb92d0000000000000000000000000000000000000000000000000000000000000000", + expectedHash: "0x41c47973d04ecb5d10eca505f0a73964976d7dd4d32f0970d29b006650c85b20", }, } @@ -240,11 +240,11 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { ParentBatchHash: common.Hash{}, InitialL1MessageIndex: 10, InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobEncode: "00070000650120a6050300f40400000a00c7436aaec2cfaf39d5be02a02c6ac200089ab264c3e0fd142db682f1c000040001646b6e137a120000020001df0b8000825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808006000039066e16790923b039d0f80258", - expectedBlobVersionedHash: "0x01170b69948c60bc987f26402b33a5fc15f10275c830dfb0fafcdae811d18f00", + expectedBlobEncode: "00070000650120a6050300f40400000a00c7436aaec2cfaf39d5be02a02c6ac200089ab264c3e0fd142db682f1cc00040001646b6e137a120000020001df0b8000825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808006000039066e16790923b039d0f80258", + expectedBlobVersionedHash: "0x017f5ad1717f1e48ed6a01647d0c038f87d075ea1b712129156ae0b0fa8dbb7a", }, { name: "Batch with 3 blocks, blocktrace 02 + 03 + 04", @@ -253,11 +253,11 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { ParentBatchHash: common.Hash{}, InitialL1MessageIndex: 10, InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: 
common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobEncode: "0007000c6801602517156300b49600000a00c7436aaec2cfaf39d5be02a02c6a00c2089ab264c3e0fd142db682f1c00002000363807b2a1de9000355418d1e810084000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8007180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0c00a28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf678100e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d0000c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a0003fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a08110378151602083015160408085018000519151939592948301929184648211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b
5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7
b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e100058080808080814ba8d130a9149a111111110549d2741105c418e61894eb01001240132dcb629c42c818e2c888502022230a0a92a4660ef030c8fc0ddb85e200d215e23516285d6a71d2c1a2a351201ca40faeab44851c1fbf00022ce7407800cf901cb445e0306b08cd4a2ae0724e1a69fa2f7aaa8fd851465eda370fade700ee1a0754a65b8078358317f2b9a460eadb2eb338ac8411a449057b478e3c0a008e0987293e5ce118ae05ccbd6837b82de87a617154940bcebb0b88ffe7152700a9f199051c3311ca647ec728aa797d3ae1518f6aa4e348024239b0a5cb78ba00987e426d486756ee2460452ecaa3d1144d5f81412b92e003774763efe158ac004b52b7a96203be266a9b0232cb47ed216a773ff21a241bbabfc22080979fc200ad
ed1bd0615426425652e36f784c92d96db151ec85cb10329135878563adb60099708967a33656729bf44924e051899c3ab3777f03148f5792a231d948a9de0007c1a68a51ba08e133d2c4db0577f63870f2430af1828b47113227da2e0d100032b92a06a32098f02854be1a42a786eec2e9fb35a97738caf6dd1d57188d3f007d29afe7f90ed912ae39132ffcb9741b8010d4f0f3292f811d01f34eab298800a7589f2030d5ea72f11ea3aa1327a64c4de1727122a0958b27aa7025bbaace0018739ab139fa2c36ec0f45a50f55f369672e65d092da47c48e56db72808bc1006bdb3cf8163c31b92c81d7e15f7ab6ae1b7740b28f67947924ce24fef45eb30017491d54e8e28719eee3946ad529583de2cb11ac09c8a704ec7335f5280e2800e97cc2e7cf7bb9245b1ae02c345dcb73998be05998b0def5f91c591330e65600b1c8bbc266faca3360d72a5d4a6edefc8c3854452460ba4a034b808c385fa800c7967a86a91e7af51660b410b97d40afa4fec3d49e522a995aa5ae6453663c00d46b84fc4ff1520634609db2201a6434008d91f0f1c73e8aa5e9f34056154b0070cd526d386d82fd155bd669540674f0e65aa05d301e9174d2e104a603eac600d1cb417f39838c4716b079e06ca3321aa7336319a40edc4a4cdfdb767a702d0012d526c29611c8d2c10817e39f4bc29d180ce6", - expectedBlobVersionedHash: "0x01a81a775d8475e6ad5ae8dfe907d0e8755e968549bce449d1521ac2f1a1ca67", + expectedBlobEncode: 
"0007000c6801602517156300b49600000a00c7436aaec2cfaf39d5be02a02c6a00c2089ab264c3e0fd142db682f1cc0002000363807b2a1de9000355418d1e810084000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8007180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0c00a28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf678100e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d0000c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a0003fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a08110378151602083015160408085018000519151939592948301929184648211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027
106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9
c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e100058080808080814ba8d130a9149a111111110549d2741105c418e61894eb01001240132dcb629c42c818e2c888502022230a0a92a4660ef030c8fc0ddb85e200d215e23516285d6a71d2c1a2a351201ca40faeab44851c1fbf00022ce7407800cf901cb445e0306b08cd4a2ae0724e1a69fa2f7aaa8fd851465eda370fade700ee1a0754a65b8078358317f2b9a460eadb2eb338ac8411a449057b478e3c0a008e0987293e5ce118ae05ccbd6837b82de87a617154940bcebb0b88ffe7152700a9f199051c3311ca647ec728aa797d3ae1518f6aa4e348024239b0a5cb78ba00987e426d486756ee2460452ecaa3d1144d5f81412b92e003774763efe158ac004b52b7a96203be266a9b0232cb47ed216a773ff21a241bbabfc22080979fc200aded1bd0615426425652e36f784c92d96db151ec85cb10329135878563adb60099708967a33656729bf44924e051899c3ab3777f03148f5792a231d948a9de0007c1a68a51ba08e133d2c4db0577f63870f2430af1828b47113227da2e0d100032b92a06a32098f02854be1a42a786eec2e9fb35a97738caf6dd1d57188d3f007d29afe7f90ed912ae39132ffcb9741b8010d4f0f3292f811d01f34eab298800a7589f2030d5ea72f11ea3aa1327a64c4de1727122a0958b27aa7025bbaace0018739ab139fa2c36ec0f45a50f55f369672e65d092da4
7c48e56db72808bc1006bdb3cf8163c31b92c81d7e15f7ab6ae1b7740b28f67947924ce24fef45eb30017491d54e8e28719eee3946ad529583de2cb11ac09c8a704ec7335f5280e2800e97cc2e7cf7bb9245b1ae02c345dcb73998be05998b0def5f91c591330e65600b1c8bbc266faca3360d72a5d4a6edefc8c3854452460ba4a034b808c385fa800c7967a86a91e7af51660b410b97d40afa4fec3d49e522a995aa5ae6453663c00d46b84fc4ff1520634609db2201a6434008d91f0f1c73e8aa5e9f34056154b0070cd526d386d82fd155bd669540674f0e65aa05d301e9174d2e104a603eac600d1cb417f39838c4716b079e06ca3321aa7336319a40edc4a4cdfdb767a702d0012d526c29611c8d2c10817e39f4bc29d180ce6", + expectedBlobVersionedHash: "0x01f6f07ae03e8a6ead4384c206ac3d38cd453c1da0516dad7608713bd35bb92d", }, { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03", @@ -266,11 +266,11 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { ParentBatchHash: common.Hash{2}, InitialL1MessageIndex: 37, InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0xb3e7e7f02af64f130535f65b0e7375d4ad3c43c2f05ad3dbe7402ab000000000"), + LastL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, }, - expectedBlobEncode: 
"0007000c4f016005174d6200049500002500b3e7e7f02af64f130535f65b0e730075d4ad3c43c2f05ad3dbe7402ab00002000363807b2a1de9000355418d1e8100840002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e50001000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf6800e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace2008d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4ab00a684835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380833981810160405260a0811037815160208301516040808500018051915193959294830192918464018211639083019060208201858179820051811182820188101794825250918201929091019080838360005b83c357810081015183820152602001620000a9565b50505050905090810190601f16f1570080820380516001836020036101000a031916819150805160405193929190010015012b01460175015b01a39081015185519093508592508491620001c891600003918501906200026b565b508051620001de9060049060208450600580546100ff001960ff1990911660121716905550600680546001600160a01b0380881600199283161790925560078054928716929091169190911790556200023081620000025562010000600160b01b03191633021790555062000307915050565b6000ff191660ff929092565b828160011615610100020316600290049060005260002060002090601f016020900481019282601f10620002ae578051838001178500de0160010185558215620002de579182015b8202de5782518255916020019100906001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010001620002f1565b61119b80620003176000396000f3fe61001004361061010b005760003560e01c80635c975abb116100a257806395d89b411161007114610300015780639dc29fac14610309578063a457c2d714610335578063a9059cbb1400610361578063dd62ed3e1461038d5761010b565b1461029d57806370a0823100146102a55780638456cb59146102cb5780638e50817a146102d3313ce56711006100de571461021d578063395093511461023b5780633f4ba83a146102675700806340c10f191
461027106fdde0314610110578063095ea7b31461018d5780006318160ddd146101cd57806323b872e7575b6101186103bb565b6040805160002080825283518183015283519192839290830161015261013a61017f9250500080910390f35b6101b9600480360360408110156101a381351690602001356100045191151582525190819003602001d561046e60fd81169160208101359091001690604074565b6102256104fb60ff90921640025105046f610552565b005b0061026f028705a956610654d520bb3516610662067d56e90135166106d21861000757031f07b856034b085f77c7d5a308db565b6003805420601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c57610100008083540402835291610447565b825b8154815260200180831161042a5782009003601f16820191565b600061046561045e610906565b848461090a565b500060019202548184f6565b6104f18461048d6104ec8560405180606080602861001085602891398a166000908152600160205260408120906104cb81019190910052604001600020549190610b51565b935460ff160511016000610522908116008252602080830193909352604091820120918c168152925290205490610be800565b600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd0800185b1b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010000900460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600006064606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046006163746f727960a0079283918216179091559390921660041561080808550e0065086c2511176025006108968dd49182408083209390941682523383166109004f57040180806020018281038252602401806110f36024913960400191fd820016610994223d60228084166000819487168084529482529182902085905581005185815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e005b200ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff8602300610a8b838383610f61565b610ac881265f60268685808220939093559084160081522054610af7908220409490945580905191937fddf252ad1be2c89b69c200b068fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484001115610be08381815191508051900ba50b8d0bd2fd900300828201610c421b007f536166654d6174683a206164646974696f6e206f766572666c6f7700610c009c1473621690557f5db9ee0a495bf
2e6ff9c91a7834c1ba4fdd244a5e8aa4e00537bd38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e740020746f20746865207a65726f72657373610d546000600254610d61025590200054610d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e00420270b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad600021610eb68260000ef3221b85839020550f199082610fb540805182600091850016919120565b610f6cb07415610fb02a113c602a00610c428383401e7375620074726163815250fe7472616e736665726275726e20616d6f756e742065786300656564732062616c616e6365617070726f7665616c6c6f7766726f6d646563007265617365642062656c6f775061757361626c653a20746f6b656e7768696c006520706175736564a2646970667358221220e96342bec8f6c2bf72815a3999008973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00330000001c5a77d9fa7ef466951b2f01f724bca3a5820b63a0e01209574554482063006f696e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e001737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc6230095bda52095d44b8a9af7814ba8c130a9143223222222122449930e1105c41800e61894eb0112401329cb622042c818e2c888502022330a0a92a4660ef030c800d41b020bd54b2b52d740a07429c5650a708c469770741f38578d8a1c181f160002fcce819dced01cd046489a753bcd9a5460c71c9ed2a85af4562fbba326bf00b460ee14ce7d8f435fa60f80784f8217f378a760e2db08177bbf8461a44971005b468e2a0a4e0987d33d5cf718d505447668371e2db86dd9425494169c23170010ffcb2b4e52e320a370cc6c28fbf4d1a0a8dbfa28c5a316d548c791841fe50040919ef57441fd956d4883596d9d80325c644c9b29277de58e5624800fd81d00edbe8743b12ea14599aa08f8b0a825cace2c1db5bfa87dfdc8a3916c7e25800041282fbf875bcf37b8c348265c565259bf213a49368cc5a0b617de4348dfac001d16fab58e66929018206a034fce4b9f08023628916c86d48a23408b99c73e001a87f64a4d9d1346aa28a58b089f91f2bcd1ba73dc693b18168865563c3291003965bbd440c8b455498c62cc84d342c5d4102035b72e6cdf5ab378c7e624de00dd778ff8f15b8cedf5c9dfc15bc2b59cc912fe5c1a0d40626a18f8e84ba04200c05c72c688c5cbaccf10d84545b9781ba3dd893ca6604de1717123a0158ae700aa7079bb74d102f3ecb18df95908ec8f924a32aa4cdeceeccc603ada6f8821005a68cb001713afcdf0e02d78a0e410040e097fd1b31d6
cdc51caae3eefe6ee003993c2a7af90bde0d4c14f17cfcc70e64e29a99d52ee51fc8b4604f43d41a600eaab794791e14ff27b3fc7de5bfa69e9deb3cd7477e9c22e1acde884264d2500e7089aa03137827df80af1e9d2cfb08c8dbb14a2f9c153cc5036495074f55b0096a019f15da8c7957a46a9de77f51860b425b91d40af7cff31df5e5d2aea5b00a5f065b3677cd66bc4a64f115d6e39e0f0b2a01b943280a2919cf06339caba00e944405dd582e0da526dbedf64c5ab86bb80a998beb1bc5a48dd793cba742000e104490784c7b9b7833eee8418b88ccaf441e4370b38239db665ec3b5055990012c1b67be8dd1ae1c0b61e0a64bca0b78bc73730ef4b0d065e", - expectedBlobVersionedHash: "0x01aaa15d8da84ac255b2c5912b9b2565a8888a11f174b7038375824895416c1e", + expectedBlobEncode: "0007000c4f016005174d62000495000025003d35d6b71c2769de1a4eb8f603e2000f539c53a10c6764a6f5836cf1310002000363807b2a1de9000355418d1e8100840002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e50001000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf6800e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace2008d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4ab00a684835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380833981810160405260a0811037815160208301516040808500018051915193959294830192918464018211639083019060208201858179820051811182820188101794825250918201929091019080838360005b83c357810081015183820152602001620000a9565b50505050905090810190601f16f1570080820380516001836020036101000a031916819150805160405193929190010015012b01460175015b01a39081015185519093508592508491620001c891600003918501906200026b565b508051620001de9060049060208450600580546100ff001960ff1990911660121716905550600680546001600160a01b0380881600199283161790925560078054928716929091169190911790556200023081620000025562010000600160b01b03191633021790555062000307915050565b6000ff191660ff92
9092565b828160011615610100020316600290049060005260002060002090601f016020900481019282601f10620002ae578051838001178500de0160010185558215620002de579182015b8202de5782518255916020019100906001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010001620002f1565b61119b80620003176000396000f3fe61001004361061010b005760003560e01c80635c975abb116100a257806395d89b411161007114610300015780639dc29fac14610309578063a457c2d714610335578063a9059cbb1400610361578063dd62ed3e1461038d5761010b565b1461029d57806370a0823100146102a55780638456cb59146102cb5780638e50817a146102d3313ce56711006100de571461021d578063395093511461023b5780633f4ba83a146102675700806340c10f191461027106fdde0314610110578063095ea7b31461018d5780006318160ddd146101cd57806323b872e7575b6101186103bb565b6040805160002080825283518183015283519192839290830161015261013a61017f9250500080910390f35b6101b9600480360360408110156101a381351690602001356100045191151582525190819003602001d561046e60fd81169160208101359091001690604074565b6102256104fb60ff90921640025105046f610552565b005b0061026f028705a956610654d520bb3516610662067d56e90135166106d21861000757031f07b856034b085f77c7d5a308db565b6003805420601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c57610100008083540402835291610447565b825b8154815260200180831161042a5782009003601f16820191565b600061046561045e610906565b848461090a565b500060019202548184f6565b6104f18461048d6104ec8560405180606080602861001085602891398a166000908152600160205260408120906104cb81019190910052604001600020549190610b51565b935460ff160511016000610522908116008252602080830193909352604091820120918c168152925290205490610be800565b600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd0800185b1b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010000900460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600006064606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046006163746f727960a0079283918216179091559390921660041561080808550e0065086c2511176025006108968dd4
9182408083209390941682523383166109004f57040180806020018281038252602401806110f36024913960400191fd820016610994223d60228084166000819487168084529482529182902085905581005185815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e005b200ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff8602300610a8b838383610f61565b610ac881265f60268685808220939093559084160081522054610af7908220409490945580905191937fddf252ad1be2c89b69c200b068fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484001115610be08381815191508051900ba50b8d0bd2fd900300828201610c421b007f536166654d6174683a206164646974696f6e206f766572666c6f7700610c009c1473621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e00537bd38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e740020746f20746865207a65726f72657373610d546000600254610d61025590200054610d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e00420270b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad600021610eb68260000ef3221b85839020550f199082610fb540805182600091850016919120565b610f6cb07415610fb02a113c602a00610c428383401e7375620074726163815250fe7472616e736665726275726e20616d6f756e742065786300656564732062616c616e6365617070726f7665616c6c6f7766726f6d646563007265617365642062656c6f775061757361626c653a20746f6b656e7768696c006520706175736564a2646970667358221220e96342bec8f6c2bf72815a3999008973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00330000001c5a77d9fa7ef466951b2f01f724bca3a5820b63a0e01209574554482063006f696e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e001737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc6230095bda52095d44b8a9af7814ba8c130a9143223222222122449930e1105c41800e61894eb0112401329cb622042c818e2c888502022330a0a92a4660ef030c800d41b020bd54b2b52d740a07429c5650a708c469770741f38578d8a1c181f160002fcce819dced01cd046489a753bcd9a5460c71c9ed2a85af4562fbba326bf00b460ee14ce7d8f435fa60f80784f8217f378a760e2db08177bbf8461a44971005b468e2a0a4e0987d33d5cf718d505447668371e2db86dd9425494169c23170010ffcb2b4e52e320a370cc6c28fbf4d1a0a8dbfa28c5
a316d548c791841fe50040919ef57441fd956d4883596d9d80325c644c9b29277de58e5624800fd81d00edbe8743b12ea14599aa08f8b0a825cace2c1db5bfa87dfdc8a3916c7e25800041282fbf875bcf37b8c348265c565259bf213a49368cc5a0b617de4348dfac001d16fab58e66929018206a034fce4b9f08023628916c86d48a23408b99c73e001a87f64a4d9d1346aa28a58b089f91f2bcd1ba73dc693b18168865563c3291003965bbd440c8b455498c62cc84d342c5d4102035b72e6cdf5ab378c7e624de00dd778ff8f15b8cedf5c9dfc15bc2b59cc912fe5c1a0d40626a18f8e84ba04200c05c72c688c5cbaccf10d84545b9781ba3dd893ca6604de1717123a0158ae700aa7079bb74d102f3ecb18df95908ec8f924a32aa4cdeceeccc603ada6f8821005a68cb001713afcdf0e02d78a0e410040e097fd1b31d6cdc51caae3eefe6ee003993c2a7af90bde0d4c14f17cfcc70e64e29a99d52ee51fc8b4604f43d41a600eaab794791e14ff27b3fc7de5bfa69e9deb3cd7477e9c22e1acde884264d2500e7089aa03137827df80af1e9d2cfb08c8dbb14a2f9c153cc5036495074f55b0096a019f15da8c7957a46a9de77f51860b425b91d40af7cff31df5e5d2aea5b00a5f065b3677cd66bc4a64f115d6e39e0f0b2a01b943280a2919cf06339caba00e944405dd582e0da526dbedf64c5ab86bb80a998beb1bc5a48dd793cba742000e104490784c7b9b7833eee8418b88ccaf441e4370b38239db665ec3b5055990012c1b67be8dd1ae1c0b61e0a64bca0b78bc73730ef4b0d065e", + expectedBlobVersionedHash: "0x019122a14fdcf36ccab4f507a6d9d45f3a1d17479e108e05ca4b27341b2da98f", }, // test error cases { @@ -702,7 +702,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { name: "Single Block 04", batch: &Batch{ InitialL1MessageIndex: 10, - LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_04.json")}, }, expectCompatible: true, @@ -711,7 +711,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { name: "Single Block 05, only L1 messages", batch: &Batch{ InitialL1MessageIndex: 37, - LastL1MessageQueueHash: 
common.HexToHash("0xb3e7e7f02af64f130535f65b0e7375d4ad3c43c2f05ad3dbe7402ab000000000"), + LastL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_05.json")}, }, expectCompatible: true, @@ -736,7 +736,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { name: "Multiple Blocks 02, 03, 04", batch: &Batch{ InitialL1MessageIndex: 10, - LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1c000000000"), + LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), diff --git a/encoding/da.go b/encoding/da.go index 4f2b07e..d96ecd6 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -815,12 +815,7 @@ func messageQueueV2ApplyL1Message(initialQueueHash common.Hash, message *types.L } func messageQueueV2EncodeRollingHash(rollingHash common.Hash) common.Hash { - // clear last 36 bits - - // Clear the lower 4 bits of byte 26 (preserving the upper 4 bits) - rollingHash[27] &= 0xF0 - - // Clear the next 4 bytes (32 bits total) + // clear last 32 bits, i.e. 4 bytes. 
rollingHash[28] = 0 rollingHash[29] = 0 rollingHash[30] = 0 diff --git a/encoding/da_test.go b/encoding/da_test.go index 08df23c..a36ea2b 100644 --- a/encoding/da_test.go +++ b/encoding/da_test.go @@ -181,7 +181,7 @@ func TestMessageQueueV2EncodeRollingHash(t *testing.T) { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xF0, + 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, }, }, @@ -204,14 +204,14 @@ func TestMessageQueueV2EncodeRollingHash(t *testing.T) { 0xCC, 0xDD, 0xEE, 0xFF, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, - 0x88, 0x99, 0xAA, 0xB0, + 0x88, 0x99, 0xAA, 0xBB, 0x00, 0x00, 0x00, 0x00, }, }, { "random hash", common.HexToHash("0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), - common.HexToHash("0x1234567890abcdef1234567890abcdef1234567890abcdef1234567000000000"), + common.HexToHash("0x1234567890abcdef1234567890abcdef1234567890abcdef1234567800000000"), }, } From 4f13363e356b8f66c273bcc6d36071f4714b6ba1 Mon Sep 17 00:00:00 2001 From: Jonas Theis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 18 Feb 2025 14:22:03 +0800 Subject: [PATCH 34/47] Apply suggestions from code review Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com> --- encoding/codecv7.go | 2 +- encoding/codecv7_types.go | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index b07c62a..88b5421 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -222,7 +222,7 @@ func (d *DACodecV7) constructBlobPayload(batch *Batch) ([]byte, error) { } // NewDABatchFromBytes decodes the given byte slice into a DABatch. -// Note: This function only populates the batch header, it leaves the blob-related fields and skipped L1 message bitmap empty. +// Note: This function only populates the batch header, it leaves the blob-related fields empty. 
func (d *DACodecV7) NewDABatchFromBytes(data []byte) (DABatch, error) { daBatch, err := decodeDABatchV7(data) if err != nil { diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index d4c870d..8a21c26 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -28,14 +28,14 @@ const ( ) // Below is the encoding format for BlobEnvelopeV7. -// * Field Bytes Type Index Comments -// * version 1 uint8 0 The version of the DA codec (batch/blob) -// * n_bytes[1] 1 uint8 1 Value denoting the number of bytes, n_bytes[1]*256^2 -// * n_bytes[2] 1 uint8 2 Value denoting the number of bytes, n_bytes[2]*256 -// * n_bytes[3] 1 uint8 3 Value denoting the number of bytes, n_bytes[3] -// * flag 1 bool 4 1-byte flag to denote zstd-encoded/raw bytes -// * payload N bytes 5 Possibly zstd-encoded payload bytes -// * padding (4096*31 - (N+5)) bytes N+5 Padding to align to 4096*31 bytes +// * Field Bytes Type Index Comments +// * version 1 uint8 0 The version of the DA codec (batch/blob) +// * n_bytes[1] 1 uint8 1 Value denoting the number of bytes, n_bytes[1]*256^2 +// * n_bytes[2] 1 uint8 2 Value denoting the number of bytes, n_bytes[2]*256 +// * n_bytes[3] 1 uint8 3 Value denoting the number of bytes, n_bytes[3] +// * flag 1 bool 4 1-byte flag to denote zstd-encoded/raw bytes +// * payload N bytes 5 Possibly zstd-encoded payload bytes +// * padding (4096*31 - (N+5)) bytes N+5 Padding to align to 4096*31 bytes const ( blobEnvelopeV7OffsetVersion = 0 @@ -45,7 +45,7 @@ const ( ) // Below is the encoding for blobPayloadV7. 
-// * Field Bytes Type Index Comments +// * Field Bytes Type Index Comments // * initialL1MessageIndex 8 uint64 0 Queue index of the first L1 message contained in this batch // * initialL1MessageQueueHash 32 bytes32 8 Hash of the L1 message queue at the last message in the previous batch // * lastL1MessageQueueHash 32 bytes32 40 Hash of the L1 message queue at the last message in this batch From bcad556f165d0a727c5b57340fb55044fdd4fdf9 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 18 Feb 2025 14:35:20 +0800 Subject: [PATCH 35/47] rename initialL1MessageQueueHash -> prevL1MessageQueueHash and lastL1MessageQueueHash -> postL1MessageQueueHash --- encoding/codecv7.go | 26 ++++---- encoding/codecv7_test.go | 136 +++++++++++++++++++------------------- encoding/codecv7_types.go | 54 +++++++-------- encoding/da.go | 14 ++-- encoding/interfaces.go | 4 +- 5 files changed, 117 insertions(+), 117 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 88b5421..75b0e64 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -112,13 +112,13 @@ func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs, // transactions ) - // sanity check: initialL1MessageQueueHash+apply(L1Messages) = lastL1MessageQueueHash - computedLastL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(chunk.InitialL1MessageQueueHash, chunk.Blocks) + // sanity check: prevL1MessageQueueHash+apply(L1Messages) = postL1MessageQueueHash + computedPostL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(chunk.PrevL1MessageQueueHash, chunk.Blocks) if err != nil { - return nil, fmt.Errorf("failed to apply L1 messages to initialL1MessageQueueHash: %w", err) + return nil, fmt.Errorf("failed to apply L1 messages to prevL1MessageQueueHash: %w", err) } - if computedLastL1MessageQueueHash != chunk.LastL1MessageQueueHash { - return nil, fmt.Errorf("failed to sanity check lastL1MessageQueueHash 
after applying all L1 messages: expected %s, got %s", computedLastL1MessageQueueHash, chunk.LastL1MessageQueueHash) + if computedPostL1MessageQueueHash != chunk.PostL1MessageQueueHash { + return nil, fmt.Errorf("failed to sanity check postL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedPostL1MessageQueueHash, chunk.PostL1MessageQueueHash) } return daChunk, nil @@ -212,10 +212,10 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b func (d *DACodecV7) constructBlobPayload(batch *Batch) ([]byte, error) { blobPayload := blobPayloadV7{ - initialL1MessageIndex: batch.InitialL1MessageIndex, - initialL1MessageQueueHash: batch.InitialL1MessageQueueHash, - lastL1MessageQueueHash: batch.LastL1MessageQueueHash, - blocks: batch.Blocks, + initialL1MessageIndex: batch.InitialL1MessageIndex, + prevL1MessageQueueHash: batch.PrevL1MessageQueueHash, + postL1MessageQueueHash: batch.PostL1MessageQueueHash, + blocks: batch.Blocks, } return blobPayload.Encode() @@ -376,10 +376,10 @@ func (d *DACodecV7) estimateL1CommitBatchSizeAndBlobSize(batch *Batch) (uint64, // EstimateChunkL1CommitBatchSizeAndBlobSize estimates the L1 commit batch size and blob size for a single chunk. 
func (d *DACodecV7) EstimateChunkL1CommitBatchSizeAndBlobSize(chunk *Chunk) (uint64, uint64, error) { return d.estimateL1CommitBatchSizeAndBlobSize(&Batch{ - Blocks: chunk.Blocks, - InitialL1MessageIndex: chunk.InitialL1MessageIndex, - InitialL1MessageQueueHash: chunk.InitialL1MessageQueueHash, - LastL1MessageQueueHash: chunk.LastL1MessageQueueHash, + Blocks: chunk.Blocks, + InitialL1MessageIndex: chunk.InitialL1MessageIndex, + PrevL1MessageQueueHash: chunk.PrevL1MessageQueueHash, + PostL1MessageQueueHash: chunk.PostL1MessageQueueHash, }) } diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index 8c7e0ed..a5d9785 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -140,7 +140,7 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { name: "Batch with 3 blocks, blocktrace 02, 03, 04", batch: &Batch{ InitialL1MessageIndex: 10, - LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), + PostL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), @@ -211,24 +211,24 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { { name: "Empty batch", batch: &Batch{ - Index: 1, - ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 0, - InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.Hash{}, - Blocks: []*Block{}, + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 0, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.Hash{}, + Blocks: []*Block{}, }, creationErr: "batch must contain at least one block", }, { name: "Batch with 1 block, blocktrace 02", batch: &Batch{ - Index: 1, - ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 0, - InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.Hash{}, - Blocks: 
[]*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 0, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.Hash{}, + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, }, expectedBlobEncode: "00070000f901606c009d0700240e000002000163807b2a1de9000355418d1e81008400020000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e002adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa7008e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19fea00cd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf871010100bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f007b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bc00ec5bd4aba684835996fc3f879380aac1c09c6eed32f105006032821d60094200a4b00e450116", expectedBlobVersionedHash: "0x01a40a4ae0fa894115c6d157d928ae6d5b95e3a38e39d0112086db7a5b94d21e", @@ -236,12 +236,12 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { { name: "Batch with 1 blocks, blocktrace 04 - 1 L1 message + 1 L2 tx", batch: &Batch{ - Index: 1, - ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 10, - InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), - Blocks: []*Block{replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 10, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), + Blocks: []*Block{replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, expectedBlobEncode: 
"00070000650120a6050300f40400000a00c7436aaec2cfaf39d5be02a02c6ac200089ab264c3e0fd142db682f1cc00040001646b6e137a120000020001df0b8000825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808006000039066e16790923b039d0f80258", expectedBlobVersionedHash: "0x017f5ad1717f1e48ed6a01647d0c038f87d075ea1b712129156ae0b0fa8dbb7a", @@ -249,12 +249,12 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { { name: "Batch with 3 blocks, blocktrace 02 + 03 + 04", batch: &Batch{ - Index: 1, - ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 10, - InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), - Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 10, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, expectedBlobEncode: 
"0007000c6801602517156300b49600000a00c7436aaec2cfaf39d5be02a02c6a00c2089ab264c3e0fd142db682f1cc0002000363807b2a1de9000355418d1e810084000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8007180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0c00a28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf678100e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d0000c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a0003fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a08110378151602083015160408085018000519151939592948301929184648211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027
106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9
c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e100058080808080814ba8d130a9149a111111110549d2741105c418e61894eb01001240132dcb629c42c818e2c888502022230a0a92a4660ef030c8fc0ddb85e200d215e23516285d6a71d2c1a2a351201ca40faeab44851c1fbf00022ce7407800cf901cb445e0306b08cd4a2ae0724e1a69fa2f7aaa8fd851465eda370fade700ee1a0754a65b8078358317f2b9a460eadb2eb338ac8411a449057b478e3c0a008e0987293e5ce118ae05ccbd6837b82de87a617154940bcebb0b88ffe7152700a9f199051c3311ca647ec728aa797d3ae1518f6aa4e348024239b0a5cb78ba00987e426d486756ee2460452ecaa3d1144d5f81412b92e003774763efe158ac004b52b7a96203be266a9b0232cb47ed216a773ff21a241bbabfc22080979fc200aded1bd0615426425652e36f784c92d96db151ec85cb10329135878563adb60099708967a33656729bf44924e051899c3ab3777f03148f5792a231d948a9de0007c1a68a51ba08e133d2c4db0577f63870f2430af1828b47113227da2e0d100032b92a06a32098f02854be1a42a786eec2e9fb35a97738caf6dd1d57188d3f007d29afe7f90ed912ae39132ffcb9741b8010d4f0f3292f811d01f34eab298800a7589f2030d5ea72f11ea3aa1327a64c4de1727122a0958b27aa7025bbaace0018739ab139fa2c36ec0f45a50f55f369672e65d092da4
7c48e56db72808bc1006bdb3cf8163c31b92c81d7e15f7ab6ae1b7740b28f67947924ce24fef45eb30017491d54e8e28719eee3946ad529583de2cb11ac09c8a704ec7335f5280e2800e97cc2e7cf7bb9245b1ae02c345dcb73998be05998b0def5f91c591330e65600b1c8bbc266faca3360d72a5d4a6edefc8c3854452460ba4a034b808c385fa800c7967a86a91e7af51660b410b97d40afa4fec3d49e522a995aa5ae6453663c00d46b84fc4ff1520634609db2201a6434008d91f0f1c73e8aa5e9f34056154b0070cd526d386d82fd155bd669540674f0e65aa05d301e9174d2e104a603eac600d1cb417f39838c4716b079e06ca3321aa7336319a40edc4a4cdfdb767a702d0012d526c29611c8d2c10817e39f4bc29d180ce6", expectedBlobVersionedHash: "0x01f6f07ae03e8a6ead4384c206ac3d38cd453c1da0516dad7608713bd35bb92d", @@ -262,12 +262,12 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03", batch: &Batch{ - Index: 3, - ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 37, - InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), - Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 37, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, }, expectedBlobEncode: 
"0007000c4f016005174d62000495000025003d35d6b71c2769de1a4eb8f603e2000f539c53a10c6764a6f5836cf1310002000363807b2a1de9000355418d1e8100840002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e50001000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf6800e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace2008d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4ab00a684835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380833981810160405260a0811037815160208301516040808500018051915193959294830192918464018211639083019060208201858179820051811182820188101794825250918201929091019080838360005b83c357810081015183820152602001620000a9565b50505050905090810190601f16f1570080820380516001836020036101000a031916819150805160405193929190010015012b01460175015b01a39081015185519093508592508491620001c891600003918501906200026b565b508051620001de9060049060208450600580546100ff001960ff1990911660121716905550600680546001600160a01b0380881600199283161790925560078054928716929091169190911790556200023081620000025562010000600160b01b03191633021790555062000307915050565b6000ff191660ff929092565b828160011615610100020316600290049060005260002060002090601f016020900481019282601f10620002ae578051838001178500de0160010185558215620002de579182015b8202de5782518255916020019100906001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010001620002f1565b61119b80620003176000396000f3fe61001004361061010b005760003560e01c80635c975abb116100a257806395d89b411161007114610300015780639dc29fac14610309578063a457c2d714610335578063a9059cbb1400610361578063dd62ed3e1461038d5761010b565b1461029d57806370a0823100146102a55780638456cb59146102cb5780638e50817a146102d3313ce56711006100de571461021d578063395093511461023b5780633f4ba83a146102675700806340c10f191
461027106fdde0314610110578063095ea7b31461018d5780006318160ddd146101cd57806323b872e7575b6101186103bb565b6040805160002080825283518183015283519192839290830161015261013a61017f9250500080910390f35b6101b9600480360360408110156101a381351690602001356100045191151582525190819003602001d561046e60fd81169160208101359091001690604074565b6102256104fb60ff90921640025105046f610552565b005b0061026f028705a956610654d520bb3516610662067d56e90135166106d21861000757031f07b856034b085f77c7d5a308db565b6003805420601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c57610100008083540402835291610447565b825b8154815260200180831161042a5782009003601f16820191565b600061046561045e610906565b848461090a565b500060019202548184f6565b6104f18461048d6104ec8560405180606080602861001085602891398a166000908152600160205260408120906104cb81019190910052604001600020549190610b51565b935460ff160511016000610522908116008252602080830193909352604091820120918c168152925290205490610be800565b600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd0800185b1b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010000900460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600006064606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046006163746f727960a0079283918216179091559390921660041561080808550e0065086c2511176025006108968dd49182408083209390941682523383166109004f57040180806020018281038252602401806110f36024913960400191fd820016610994223d60228084166000819487168084529482529182902085905581005185815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e005b200ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff8602300610a8b838383610f61565b610ac881265f60268685808220939093559084160081522054610af7908220409490945580905191937fddf252ad1be2c89b69c200b068fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484001115610be08381815191508051900ba50b8d0bd2fd900300828201610c421b007f536166654d6174683a206164646974696f6e206f766572666c6f7700610c009c1473621690557f5db9ee0a495bf
2e6ff9c91a7834c1ba4fdd244a5e8aa4e00537bd38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e740020746f20746865207a65726f72657373610d546000600254610d61025590200054610d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e00420270b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad600021610eb68260000ef3221b85839020550f199082610fb540805182600091850016919120565b610f6cb07415610fb02a113c602a00610c428383401e7375620074726163815250fe7472616e736665726275726e20616d6f756e742065786300656564732062616c616e6365617070726f7665616c6c6f7766726f6d646563007265617365642062656c6f775061757361626c653a20746f6b656e7768696c006520706175736564a2646970667358221220e96342bec8f6c2bf72815a3999008973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00330000001c5a77d9fa7ef466951b2f01f724bca3a5820b63a0e01209574554482063006f696e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e001737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc6230095bda52095d44b8a9af7814ba8c130a9143223222222122449930e1105c41800e61894eb0112401329cb622042c818e2c888502022330a0a92a4660ef030c800d41b020bd54b2b52d740a07429c5650a708c469770741f38578d8a1c181f160002fcce819dced01cd046489a753bcd9a5460c71c9ed2a85af4562fbba326bf00b460ee14ce7d8f435fa60f80784f8217f378a760e2db08177bbf8461a44971005b468e2a0a4e0987d33d5cf718d505447668371e2db86dd9425494169c23170010ffcb2b4e52e320a370cc6c28fbf4d1a0a8dbfa28c5a316d548c791841fe50040919ef57441fd956d4883596d9d80325c644c9b29277de58e5624800fd81d00edbe8743b12ea14599aa08f8b0a825cace2c1db5bfa87dfdc8a3916c7e25800041282fbf875bcf37b8c348265c565259bf213a49368cc5a0b617de4348dfac001d16fab58e66929018206a034fce4b9f08023628916c86d48a23408b99c73e001a87f64a4d9d1346aa28a58b089f91f2bcd1ba73dc693b18168865563c3291003965bbd440c8b455498c62cc84d342c5d4102035b72e6cdf5ab378c7e624de00dd778ff8f15b8cedf5c9dfc15bc2b59cc912fe5c1a0d40626a18f8e84ba04200c05c72c688c5cbaccf10d84545b9781ba3dd893ca6604de1717123a0158ae700aa7079bb74d102f3ecb18df95908ec8f924a32aa4cdeceeccc603ada6f8821005a68cb001713afcdf0e02d78a0e410040e097fd1b31d6
cdc51caae3eefe6ee003993c2a7af90bde0d4c14f17cfcc70e64e29a99d52ee51fc8b4604f43d41a600eaab794791e14ff27b3fc7de5bfa69e9deb3cd7477e9c22e1acde884264d2500e7089aa03137827df80af1e9d2cfb08c8dbb14a2f9c153cc5036495074f55b0096a019f15da8c7957a46a9de77f51860b425b91d40af7cff31df5e5d2aea5b00a5f065b3677cd66bc4a64f115d6e39e0f0b2a01b943280a2919cf06339caba00e944405dd582e0da526dbedf64c5ab86bb80a998beb1bc5a48dd793cba742000e104490784c7b9b7833eee8418b88ccaf441e4370b38239db665ec3b5055990012c1b67be8dd1ae1c0b61e0a64bca0b78bc73730ef4b0d065e", expectedBlobVersionedHash: "0x019122a14fdcf36ccab4f507a6d9d45f3a1d17479e108e05ca4b27341b2da98f", @@ -276,60 +276,60 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03, but with wrong initialL1MessageIndex", batch: &Batch{ - Index: 3, - ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 21, - InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), - Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 21, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, }, creationErr: "failed to sanity check L1 messages count", }, { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03, but with wrong (not consecutive) block number", batch: &Batch{ - Index: 3, - ParentBatchHash: common.Hash{2}, - 
InitialL1MessageIndex: 21, - InitialL1MessageQueueHash: common.Hash{}, - LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), - Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_05.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 21, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_05.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, }, creationErr: "invalid block number", }, { - name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03, but with wrong LastL1MessageQueueHash", + name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03, but with wrong PostL1MessageQueueHash", batch: &Batch{ - Index: 3, - ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 37, - InitialL1MessageQueueHash: common.Hash{1}, - LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), - Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 37, + PrevL1MessageQueueHash: common.Hash{1}, + PostL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, 
"testdata/blockTrace_03.json"), 4)}, }, - creationErr: "failed to sanity check lastL1MessageQueueHash", + creationErr: "failed to sanity check postL1MessageQueueHash", }, { name: "Batch with 3 blocks, blocktrace 02, 04 + 05 (L1 messages only), but with non-consecutive L1 messages number across blocks 04 and 05", batch: &Batch{ - Index: 3, - ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 9, - InitialL1MessageQueueHash: common.Hash{1}, - LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), - Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 4)}, + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 9, + PrevL1MessageQueueHash: common.Hash{1}, + PostL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 4)}, }, creationErr: "failed to sanity check L1 messages count", }, { name: "Batch with 3 blocks, blocktrace 02, 06, but with non-consecutive L1 messages number within block 06", batch: &Batch{ - Index: 3, - ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 9, - InitialL1MessageQueueHash: common.Hash{1}, - LastL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), - Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_06.json"), 3)}, + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 9, + PrevL1MessageQueueHash: common.Hash{1}, + PostL1MessageQueueHash: 
common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_06.json"), 3)}, }, creationErr: "unexpected queue index", }, @@ -354,8 +354,8 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { require.NoError(t, err) require.Equal(t, tc.batch.InitialL1MessageIndex, blobPayload.InitialL1MessageIndex()) - require.Equal(t, tc.batch.InitialL1MessageQueueHash, blobPayload.InitialL1MessageQueueHash()) - require.Equal(t, tc.batch.LastL1MessageQueueHash, blobPayload.LastL1MessageQueueHash()) + require.Equal(t, tc.batch.PrevL1MessageQueueHash, blobPayload.PrevL1MessageQueueHash()) + require.Equal(t, tc.batch.PostL1MessageQueueHash, blobPayload.PostL1MessageQueueHash()) // check correctness of decoded blocks and transactions require.Equal(t, len(tc.batch.Blocks), len(blobPayload.Blocks())) @@ -702,7 +702,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { name: "Single Block 04", batch: &Batch{ InitialL1MessageIndex: 10, - LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), + PostL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_04.json")}, }, expectCompatible: true, @@ -711,7 +711,7 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { name: "Single Block 05, only L1 messages", batch: &Batch{ InitialL1MessageIndex: 37, - LastL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), + PostL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_05.json")}, }, expectCompatible: true, @@ -736,7 +736,7 @@ func 
TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { name: "Multiple Blocks 02, 03, 04", batch: &Batch{ InitialL1MessageIndex: 10, - LastL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), + PostL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 8a21c26..2ea49d0 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -47,8 +47,8 @@ const ( // Below is the encoding for blobPayloadV7. // * Field Bytes Type Index Comments // * initialL1MessageIndex 8 uint64 0 Queue index of the first L1 message contained in this batch -// * initialL1MessageQueueHash 32 bytes32 8 Hash of the L1 message queue at the last message in the previous batch -// * lastL1MessageQueueHash 32 bytes32 40 Hash of the L1 message queue at the last message in this batch +// * prevL1MessageQueueHash 32 bytes32 8 hash of the L1 message queue at the end of previous batch +// * postL1MessageQueueHash 32 bytes32 40 hash of the L1 message queue at the end of this batch // * initialL2BlockNumber 8 uint64 72 The initial L2 block number in this batch // * numBlocks 2 uint16 80 The number of blocks in this batch // * block[0] 52 BlockContextV2 82 The first block in this batch @@ -59,8 +59,8 @@ const ( const ( blobPayloadV7MinEncodedLength = 8 + 2*common.HashLength + 8 + 2 blobPayloadV7OffsetInitialL1MessageIndex = 0 - blobPayloadV7OffsetInitialL1MessageQueue = 8 - blobPayloadV7OffsetLastL1MessageQueue = 40 + blobPayloadV7OffsetPrevL1MessageQueue = 8 + blobPayloadV7OffsetPostL1MessageQueue = 40 blobPayloadV7OffsetInitialL2BlockNumber = 72 blobPayloadV7OffsetNumBlocks = 80 blobPayloadV7OffsetBlocks = 82 @@ -187,9 +187,9 @@ func (b *daBatchV7) DataHash() common.Hash { } type 
blobPayloadV7 struct { - initialL1MessageIndex uint64 - initialL1MessageQueueHash common.Hash - lastL1MessageQueueHash common.Hash + initialL1MessageIndex uint64 + prevL1MessageQueueHash common.Hash + postL1MessageQueueHash common.Hash // used for encoding blocks []*Block @@ -202,12 +202,12 @@ type blobPayloadV7 struct { func (b *blobPayloadV7) InitialL1MessageIndex() uint64 { return b.initialL1MessageIndex } -func (b *blobPayloadV7) InitialL1MessageQueueHash() common.Hash { - return b.initialL1MessageQueueHash +func (b *blobPayloadV7) PrevL1MessageQueueHash() common.Hash { + return b.prevL1MessageQueueHash } -func (b *blobPayloadV7) LastL1MessageQueueHash() common.Hash { - return b.lastL1MessageQueueHash +func (b *blobPayloadV7) PostL1MessageQueueHash() common.Hash { + return b.postL1MessageQueueHash } func (b *blobPayloadV7) Blocks() []DABlock { @@ -221,9 +221,9 @@ func (b *blobPayloadV7) Transactions() []types.Transactions { func (b *blobPayloadV7) Encode() ([]byte, error) { payloadBytes := make([]byte, blobPayloadV7MinEncodedLength) - binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetInitialL1MessageQueue], b.initialL1MessageIndex) - copy(payloadBytes[blobPayloadV7OffsetInitialL1MessageQueue:blobPayloadV7OffsetLastL1MessageQueue], b.initialL1MessageQueueHash[:]) - copy(payloadBytes[blobPayloadV7OffsetLastL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber], b.lastL1MessageQueueHash[:]) + binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetPrevL1MessageQueue], b.initialL1MessageIndex) + copy(payloadBytes[blobPayloadV7OffsetPrevL1MessageQueue:blobPayloadV7OffsetPostL1MessageQueue], b.prevL1MessageQueueHash[:]) + copy(payloadBytes[blobPayloadV7OffsetPostL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber], b.postL1MessageQueueHash[:]) var initialL2BlockNumber uint64 if len(b.blocks) > 0 { @@ -270,13 +270,13 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { } 
payloadBytes = append(payloadBytes, transactionBytes...) - // sanity check: initialL1MessageQueueHash+apply(L1Messages) = lastL1MessageQueueHash - computedLastL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(b.initialL1MessageQueueHash, b.blocks) + // sanity check: prevL1MessageQueueHash+apply(L1Messages) = postL1MessageQueueHash + computedPostL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(b.prevL1MessageQueueHash, b.blocks) if err != nil { - return nil, fmt.Errorf("failed to apply L1 messages to initialL1MessageQueueHash: %w", err) + return nil, fmt.Errorf("failed to apply L1 messages to prevL1MessageQueueHash: %w", err) } - if computedLastL1MessageQueueHash != b.lastL1MessageQueueHash { - return nil, fmt.Errorf("failed to sanity check lastL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedLastL1MessageQueueHash, b.lastL1MessageQueueHash) + if computedPostL1MessageQueueHash != b.postL1MessageQueueHash { + return nil, fmt.Errorf("failed to sanity check postL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedPostL1MessageQueueHash, b.postL1MessageQueueHash) } return payloadBytes, nil @@ -287,9 +287,9 @@ func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { return nil, fmt.Errorf("invalid data length for blobPayloadV7, expected at least %d bytes but got %d", blobPayloadV7MinEncodedLength, len(data)) } - initialL1MessageIndex := binary.BigEndian.Uint64(data[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetInitialL1MessageQueue]) - initialL1MessageQueueHash := common.BytesToHash(data[blobPayloadV7OffsetInitialL1MessageQueue:blobPayloadV7OffsetLastL1MessageQueue]) - lastL1MessageQueueHash := common.BytesToHash(data[blobPayloadV7OffsetLastL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber]) + initialL1MessageIndex := binary.BigEndian.Uint64(data[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetPrevL1MessageQueue]) + prevL1MessageQueueHash 
:= common.BytesToHash(data[blobPayloadV7OffsetPrevL1MessageQueue:blobPayloadV7OffsetPostL1MessageQueue]) + postL1MessageQueueHash := common.BytesToHash(data[blobPayloadV7OffsetPostL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber]) initialL2BlockNumber := binary.BigEndian.Uint64(data[blobPayloadV7OffsetInitialL2BlockNumber:blobPayloadV7OffsetNumBlocks]) numBlocks := int(binary.BigEndian.Uint16(data[blobPayloadV7OffsetNumBlocks:blobPayloadV7OffsetBlocks])) @@ -337,11 +337,11 @@ func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { } return &blobPayloadV7{ - initialL1MessageIndex: initialL1MessageIndex, - initialL1MessageQueueHash: initialL1MessageQueueHash, - lastL1MessageQueueHash: lastL1MessageQueueHash, - daBlocks: daBlocks, - l2Transactions: transactions, + initialL1MessageIndex: initialL1MessageIndex, + prevL1MessageQueueHash: prevL1MessageQueueHash, + postL1MessageQueueHash: postL1MessageQueueHash, + daBlocks: daBlocks, + l2Transactions: transactions, }, nil } diff --git a/encoding/da.go b/encoding/da.go index d96ecd6..d78be89 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -110,9 +110,9 @@ type Chunk struct { Blocks []*Block `json:"blocks"` // CodecV7. Used for chunk creation in relayer. - InitialL1MessageIndex uint64 - InitialL1MessageQueueHash common.Hash - LastL1MessageQueueHash common.Hash + InitialL1MessageIndex uint64 + PrevL1MessageQueueHash common.Hash + PostL1MessageQueueHash common.Hash } // Batch represents a batch of chunks. @@ -123,10 +123,10 @@ type Batch struct { Chunks []*Chunk // CodecV7 - InitialL1MessageIndex uint64 - InitialL1MessageQueueHash common.Hash - LastL1MessageQueueHash common.Hash - Blocks []*Block + InitialL1MessageIndex uint64 + PrevL1MessageQueueHash common.Hash + PostL1MessageQueueHash common.Hash + Blocks []*Block } // NumL1Messages returns the number of L1 messages in this block. 
diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 29cab0e..5259a58 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -45,8 +45,8 @@ type DABlobPayload interface { Blocks() []DABlock Transactions() []types.Transactions InitialL1MessageIndex() uint64 - InitialL1MessageQueueHash() common.Hash - LastL1MessageQueueHash() common.Hash + PrevL1MessageQueueHash() common.Hash + PostL1MessageQueueHash() common.Hash } // Codec represents the interface for encoding and decoding DA-related structures. From 75229310413548d117d864e493146a78b5938095 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 18 Feb 2025 14:51:51 +0800 Subject: [PATCH 36/47] address review comments --- encoding/codecv7.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 75b0e64..e2deeb6 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -241,7 +241,7 @@ func (d *DACodecV7) NewDABatchFromParams(batchIndex uint64, blobVersionedHash, p } func (d *DACodecV7) DecodeDAChunksRawTx(_ [][]byte) ([]*DAChunkRawTx, error) { - return nil, nil + return nil, errors.New("DecodeDAChunksRawTx is not implemented for DACodecV7, use DecodeBlob instead") } func (d *DACodecV7) DecodeBlob(blob *kzg4844.Blob) (DABlobPayload, error) { From 32f5b49b9e1940475eb4283e34a1366726292eee Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 18 Feb 2025 14:54:31 +0800 Subject: [PATCH 37/47] address review comments --- encoding/codecv7_types.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 2ea49d0..9f823a3 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -175,13 +175,13 @@ func (b *daBatchV7) Version() CodecVersion { } // SkippedL1MessageBitmap returns the skipped L1 message bitmap of the DABatch. 
-// For daBatchV7, there is no skipped L1 message bitmap. +// Note: For daBatchV7, there is no skipped L1 message bitmap, therefore the function returns nil. func (b *daBatchV7) SkippedL1MessageBitmap() []byte { return nil } // DataHash returns the data hash of the DABatch. -// For daBatchV7, there is no data hash. +// Note: For daBatchV7, there is no data hash, therefore the function returns an empty hash. func (b *daBatchV7) DataHash() common.Hash { return common.Hash{} } From 0247443faabd059fbbf12cf7da08793cc0da96c4 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Tue, 18 Feb 2025 19:46:52 +0800 Subject: [PATCH 38/47] add challenge digest computation for batch --- encoding/codecv7_test.go | 96 +++++++++++++++++++++++++++++++++++++++ encoding/codecv7_types.go | 27 ++++++++++- 2 files changed, 121 insertions(+), 2 deletions(-) diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index a5d9785..d73b996 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -13,6 +13,7 @@ import ( "github.com/scroll-tech/go-ethereum/common" "github.com/scroll-tech/go-ethereum/common/hexutil" "github.com/scroll-tech/go-ethereum/core/types" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -831,6 +832,101 @@ func TestDACodecV7JSONFromBytes(t *testing.T) { } } +func TestCodecV7BatchBlobDataProofForPointEvaluation(t *testing.T) { + testCases := []struct { + name string + batch *Batch + creationErr string + expectedBlobDataProof string + }{ + { + name: "Batch with 1 block, blocktrace 02", + batch: &Batch{ + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 0, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.Hash{}, + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, + }, + expectedBlobDataProof: 
"2781a7620cf0cecb23596d7fee625cc26c61b7d605eb2ff43c7ff98fa4a8f3152bbf1b44bb80d37bdd2a352d25e88ea27377cfc3dd7a818f17fe397942dc6400901bb682fa2d91cca8005e181302e0f6e87553579a2d4b16b94e911f7c8b9703492d84fc5765212bc1c1796583e2b86aac6f758bf87fc1a1055c92e03d7217522e31f337255a63fa2b9573714b1e2af4b5e9ce3ab7c2b93a1acc637663435ef5", + }, + { + name: "Batch with 1 block, blocktrace 03", + batch: &Batch{ + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 0, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.Hash{}, + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_03.json")}, + }, + expectedBlobDataProof: "42e370c6467ad79b5f7d79050685752fc043d8ff03505aae427bb35c6950d9e4475d546b7166b79e5faee7dd779585846e43de90c67201762ea7f3b6ca9b965b885613949b4e3624a6f8d9f4ec8e03f97d6ad2b3d4ca3462928907ae63189302c75e39974ec2c85e29911b8c3191af2b928bfea24d5c235a4ab0bfd66b6892b0b5a88e4b2cbd7e8dea48fae0a90ed84297dc6149954a2a7245fc5b9f5a258663", + }, + { + name: "Batch with 1 block, blocktrace 04", + batch: &Batch{ + Index: 1, + ParentBatchHash: common.Hash{1, 2, 3, 4}, + InitialL1MessageIndex: 10, + PrevL1MessageQueueHash: common.Hash{1, 2, 3, 4}, + PostL1MessageQueueHash: common.HexToHash("0x6250cf03e7f922eefe450e9d4234ec56a1502066cd55eff22939df6100000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_04.json")}, + }, + expectedBlobDataProof: "43c83acecf2100a74f1cce1a7a62101af22a744f1d61aca5bae8a6bd81a0d2040116ade6d71ec98b6208cfe96c1241c092018506893f4d652a43febcc11a1f2dad8549493363f782fd8893ba193e05498e85d7e0cec10b53ff4e7b53e06659d0209a12b663e3807541c3a4ec6ac0561ea44941243065b683efedfe91c2f84cc90ab5251646d6f929899bb6ce74b0320eb22c31bfe460659b1191c99bfc7afdd6", + }, + { + name: "Batch with 1 block, blocktrace 05", + batch: &Batch{ + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 37, + PrevL1MessageQueueHash: common.Hash{5, 6, 7, 8}, + PostL1MessageQueueHash: 
common.HexToHash("0xc31c3ca9a880b80c4e7fcb88844a5e21433bd2801bdd504e1ca4aed900000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_05.json")}, + }, + expectedBlobDataProof: "533782971f05c7a9eb627dc74614984f0c25bea4d2bf5a211360b51b9301dcd327587f1945a25f9063cab01372d4609430c193b66e60a34afe41a5ff341b4673a526889dd5d4c35affcfb513c910d8868deecc52fc40db17ce1eb67b0c1152d56c02dbe5b0f9eb7401649e59d8af6bb7ac69a24a5e2d06ca4ec8b927d0c9b7ceb9e6ef8f71edfa1d5135c183884c88a9d04ae993f006315e5318bb67c15c3b89", + }, + { + name: "Batch with 3 blocks, blocktrace 02 + 03 + 04", + batch: &Batch{ + Index: 1, + ParentBatchHash: common.Hash{}, + InitialL1MessageIndex: 10, + PrevL1MessageQueueHash: common.Hash{9, 10, 11}, + PostL1MessageQueueHash: common.HexToHash("0x20f1c72064552d63fb7e1352b7815a9f8231a028220bf63d27b24bec00000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, + }, + expectedBlobDataProof: "009064652841cb148bc7516a4e5835e8ecc0e1e6e11c9b57378ed90e74e845056dde6e9763ffeab8fc5c8cfcbb33a23a80558429f38cb0e8ef9e2a8c62718b1aa9068ee04e998fdec84a3d6681b70696ccd99dd0ab20cfea19e52d91de68b4f73a0da2ceeb1c64131c4a20b9de632d188fe355ae8a9ab57e3bf8792a99a605b088abbfb656cca16758cf301c7863140b3578867cb03bb42956462808e7c72171", + }, + { + name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03", + batch: &Batch{ + Index: 3, + ParentBatchHash: common.Hash{2}, + InitialL1MessageIndex: 37, + PrevL1MessageQueueHash: common.Hash{}, + PostL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), + Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, + }, + expectedBlobDataProof: 
"5e52251386a249e9f8e6c458125fe3347d2358190ee03aa81b0d37128521a75e10c7f2f975cb37f42098a1d173feeaa867da04ecffbdcb6459c5d492a5b0ff048014b94261a8c345d86762e5a96d7f461083d34533175e30ec4ac5ab6cb7360c092822225fd9e5522be341b5f7ad88229394ef2568cd55a8dc60ec62ba818843d8acd83d0642203a19931fea4242cca9ec277b9ae16709b23d65376b85971e2f", + }, + } + + codecV7, err := CodecFromVersion(CodecV7) + require.NoError(t, err) + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + daBatch, err := codecV7.NewDABatch(tc.batch) + require.NoError(t, err) + verifyData, err := daBatch.BlobDataProofForPointEvaluation() + require.NoError(t, err) + assert.Equal(t, tc.expectedBlobDataProof, hex.EncodeToString(verifyData)) + }) + } +} + func assertEqualDABlocks(t *testing.T, expected, actual DABlock) { require.Equal(t, expected.Number(), actual.Number()) require.Equal(t, expected.NumTransactions(), actual.NumTransactions()) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 9f823a3..af986ab 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -3,6 +3,7 @@ package encoding import ( "bytes" "encoding/binary" + "encoding/hex" "encoding/json" "fmt" "math/big" @@ -136,9 +137,31 @@ func (b *daBatchV7) Hash() common.Hash { } // BlobDataProofForPointEvaluation computes the abi-encoded blob verification data. -// Note: This method is not implemented for daBatchV7. 
func (b *daBatchV7) BlobDataProofForPointEvaluation() ([]byte, error) { - return nil, nil + challengeDigest := crypto.Keccak256Hash(crypto.Keccak256(b.blobBytes), b.blobVersionedHash.Bytes()) + + // z = challengeDigest % BLS_MODULUS + pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), blsModulus) + pointBytes := pointBigInt.Bytes() + + var z kzg4844.Point + if len(pointBytes) > kzgPointByteSize { + return nil, fmt.Errorf("pointBytes length exceeds %d bytes, got %d bytes", kzgPointByteSize, len(pointBytes)) + } + start := kzgPointByteSize - len(pointBytes) + copy(z[start:], pointBytes) + + commitment, err := kzg4844.BlobToCommitment(b.blob) + if err != nil { + return nil, fmt.Errorf("failed to create blob commitment: %w", err) + } + + proof, y, err := kzg4844.ComputeProof(b.blob, z) + if err != nil { + return nil, fmt.Errorf("failed to create KZG proof at point, err: %w, z: %v", err, hex.EncodeToString(z[:])) + } + + return blobDataProofFromValues(z, y, commitment, proof), nil } // Blob returns the blob of the batch. 
From 2043787704b00632d9de2a0ea3e7b0f9b0de21b3 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Wed, 19 Feb 2025 18:47:49 +0800 Subject: [PATCH 39/47] remove InitialL1MessageIndex from CodecV7 --- encoding/codecv7.go | 6 +- encoding/codecv7_test.go | 120 ++++++++++++++------------------------ encoding/codecv7_types.go | 49 +++++++--------- encoding/da.go | 20 ++++--- encoding/interfaces.go | 1 - 5 files changed, 79 insertions(+), 117 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index e2deeb6..76cb249 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -34,7 +34,7 @@ func (d *DACodecV7) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) return nil, errors.New("block number is not uint64") } - numL1Messages, highestQueueIndex, err := block.NumL1MessagesNoSkipping() + numL1Messages, _, highestQueueIndex, err := block.NumL1MessagesNoSkipping() if err != nil { return nil, fmt.Errorf("failed to calculate number of L1 messages: %w", err) } @@ -90,7 +90,7 @@ func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) } // sanity check (within NumL1MessagesNoSkipping): L1 message indices are contiguous within a block - numL1Messages, highestQueueIndex, err := block.NumL1MessagesNoSkipping() + numL1Messages, _, highestQueueIndex, err := block.NumL1MessagesNoSkipping() if err != nil { return nil, fmt.Errorf("failed to get numL1Messages: %w", err) } @@ -212,7 +212,6 @@ func (d *DACodecV7) constructBlob(batch *Batch) (*kzg4844.Blob, common.Hash, []b func (d *DACodecV7) constructBlobPayload(batch *Batch) ([]byte, error) { blobPayload := blobPayloadV7{ - initialL1MessageIndex: batch.InitialL1MessageIndex, prevL1MessageQueueHash: batch.PrevL1MessageQueueHash, postL1MessageQueueHash: batch.PostL1MessageQueueHash, blocks: batch.Blocks, @@ -377,7 +376,6 @@ func (d *DACodecV7) estimateL1CommitBatchSizeAndBlobSize(batch *Batch) (uint64, func (d *DACodecV7) 
EstimateChunkL1CommitBatchSizeAndBlobSize(chunk *Chunk) (uint64, uint64, error) { return d.estimateL1CommitBatchSizeAndBlobSize(&Batch{ Blocks: chunk.Blocks, - InitialL1MessageIndex: chunk.InitialL1MessageIndex, PrevL1MessageQueueHash: chunk.PrevL1MessageQueueHash, PostL1MessageQueueHash: chunk.PostL1MessageQueueHash, }) diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index d73b996..1078381 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -127,8 +127,8 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { batch: &Batch{ Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, }, - expectedEncode: "07000000000000000001a40a4ae0fa894115c6d157d928ae6d5b95e3a38e39d0112086db7a5b94d21e0000000000000000000000000000000000000000000000000000000000000000", - expectedHash: "0xae204a7f43d50947ed9033bddac0e8dcebeace076b60c20c4fdfd0284f94f5d4", + expectedEncode: "07000000000000000001fe584c5ad4177f0f204262f2dc663592702762b363509d726c2c6e05d6f3960000000000000000000000000000000000000000000000000000000000000000", + expectedHash: "0x6f7e34f79b096f96f989200c353ef3875fda0e8372690e09c360be865e161b50", }, { name: "Batch with 1 block, blocktrace 06, creation error=L1 messages not consecutive", @@ -140,7 +140,6 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { { name: "Batch with 3 blocks, blocktrace 02, 03, 04", batch: &Batch{ - InitialL1MessageIndex: 10, PostL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), @@ -148,8 +147,8 @@ func TestCodecV7DABatchHashEncodeDecode(t *testing.T) { replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4), }, }, - expectedEncode: "07000000000000000001f6f07ae03e8a6ead4384c206ac3d38cd453c1da0516dad7608713bd35bb92d0000000000000000000000000000000000000000000000000000000000000000", - expectedHash: 
"0x41c47973d04ecb5d10eca505f0a73964976d7dd4d32f0970d29b006650c85b20", + expectedEncode: "070000000000000000012f5d0b0130addfce5502c7ce3d04945634fa80efd4b996ce71e1f2203ced3f0000000000000000000000000000000000000000000000000000000000000000", + expectedHash: "0x4eb67346d4060cde3f68100ae247e30de7b2f908934b58de581a4f2930bb6810", }, } @@ -214,7 +213,6 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 0, PrevL1MessageQueueHash: common.Hash{}, PostL1MessageQueueHash: common.Hash{}, Blocks: []*Block{}, @@ -226,72 +224,55 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 0, PrevL1MessageQueueHash: common.Hash{}, PostL1MessageQueueHash: common.Hash{}, Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, }, - expectedBlobEncode: "00070000f901606c009d0700240e000002000163807b2a1de9000355418d1e81008400020000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e002adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa7008e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19fea00cd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf871010100bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f007b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bc00ec5bd4aba684835996fc3f879380aac1c09c6eed32f105006032821d60094200a4b00e450116", - expectedBlobVersionedHash: "0x01a40a4ae0fa894115c6d157d928ae6d5b95e3a38e39d0112086db7a5b94d21e", + expectedBlobEncode: 
"00070000f9016064009d0700240e000002000163807b2a1de9000355418d1e81008400020000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e002adeceeacb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa7008e7ba5cf6781e90cc32b219b1de102513d56548a41e86df514a034cbd19fea00cd73e8ce64d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf871010100bae6bf68e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f007b2cace28d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bc00ec5bd4aba684835996fc3f879380aac1c09c6eed32f105006032821d6009420094b00e410116", + expectedBlobVersionedHash: "0x01fe584c5ad4177f0f204262f2dc663592702762b363509d726c2c6e05d6f396", }, { name: "Batch with 1 blocks, blocktrace 04 - 1 L1 message + 1 L2 tx", batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 10, PrevL1MessageQueueHash: common.Hash{}, PostL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobEncode: "00070000650120a6050300f40400000a00c7436aaec2cfaf39d5be02a02c6ac200089ab264c3e0fd142db682f1cc00040001646b6e137a120000020001df0b8000825dc0941a258d17bf244c4df02d40343a7626a9d321e105808080808006000039066e16790923b039d0f80258", - expectedBlobVersionedHash: "0x017f5ad1717f1e48ed6a01647d0c038f87d075ea1b712129156ae0b0fa8dbb7a", + expectedBlobEncode: "000700006201209eed0200d4040000c7436aaec2cfaf39d5be02a02c6ac2089a00b264c3e0fd142db682f1cc00040001646b6e137a120000020001df0b80825d00c0941a258d17bf244c4df02d40343a7626a9d321e105808080808005003906006e16790923b039116001", + expectedBlobVersionedHash: "0x01613f6d2f90590578d58e46f4ee246bf7727f1a85f01c76a327c499a2372481", }, { name: "Batch with 3 blocks, blocktrace 02 + 03 + 04", batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 10, PrevL1MessageQueueHash: common.Hash{}, PostL1MessageQueueHash: 
common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobEncode: "0007000c6801602517156300b49600000a00c7436aaec2cfaf39d5be02a02c6a00c2089ab264c3e0fd142db682f1cc0002000363807b2a1de9000355418d1e810084000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8007180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0c00a28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf678100e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d0000c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a0003fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a08110378151602083015160408085018000519151939592948301929184648211639083019060208201858179825181110082820188101794825250918201929091019080838360005b83c357818101510083820152602001620000a9565b50505050905090810190601f16f1578082030080516001836020036101000a0319168191508051604051939291900115012b0001460175015b01a39081015185519093508592508491620001c891600391850001906200026b565b508051620001de90600490602084506005805461ff00190060ff1990911660121716905550600680546001600160a01b0380881619928300161790925560078054928716929091169190911790556200023081620002550062010000600160b01b03191633021790555062000307915050565b60ff19160060ff929092565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de016000010185558215620002de579182015b8202de5782518255916020019190600100c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb
116100a257806395d89b411161007114610301578000639dc29fac14610309578063a457c2d714610335578063a9059cbb1461036100578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610200a55780638456cb59146102cb5780638e50817a146102d3313ce567116100de00571461021d578063395093511461023b5780633f4ba83a146102675780634000c10f191461027106fdde0314610110578063095ea7b31461018d5780631816000ddd146101cd57806323b872e7575b6101186103bb565b6040805160208082005283518183015283519192839290830161015261013a61017f9250508091030090f35b6101b9600480360360408110156101a381351690602001356104519100151582525190819003602001d561046e60fd81169160208101359091169060004074565b6102256104fb60ff90921640025105046f610552565b005b61026f00028705a956610654d520bb3516610662067d56e90135166106d21861075703001f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282810052606093909290918301828280156104475780601f1061041c57610100808300540402835291610447565b825b8154815260200180831161042a5782900360001f16820191565b600061046561045e610906565b848461090a565b506001920002548184f6565b6104f18461048d6104ec8560405180606080602861108560002891398a166000908152600160205260408120906104cb81019190915260400001600020549190610b51565b935460ff160511016000610522908116825260002080830193909352604091820120918c168152925290205490610be8565b60000716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b001bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090040060ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460006508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616374006f727960a0079283918216179091559390921660041561080808550e65086c002511176025006108968dd491824080832093909416825233831661094f5704000180806020018281038252602401806110f36024913960400191fd821661090094223d60228084166000819487168084529482529182902085905581518581005291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200a00c8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b00838383610f61565b610ac881265f60268685
808220939093559084168152200054610af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc00378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111561000be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53610066654d6174683a206164646974696f6e206f766572666c6f7700610c9c147300621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537bd3008aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f0020746865207a65726f72657373610d546000600254610d610255902054610d008780838393519293910e2d6101001790557f62e78cea01bee320cd4e42027000b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610e00b68260000ef3221b85839020550f199082610fb540805182600091851691910020565b610f6cb07415610fb02a113c602a00610c428383401e7375627472610063815250fe7472616e736665726275726e20616d6f756e742065786365656400732062616c616e6365617070726f7665616c6c6f7766726f6d646563726561007365642062656c6f775061757361626c653a20746f6b656e7768696c652070006175736564a2646970667358221220e96342bec8f6c2bf72815a39998973b6004c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d900fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c00001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c00633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bda5209500d44b8a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e100058080808080814ba8d130a9149a111111110549d2741105c418e61894eb01001240132dcb629c42c818e2c888502022230a0a92a4660ef030c8fc0ddb85e200d215e23516285d6a71d2c1a2a351201ca40faeab44851c1fbf00022ce7407800cf901cb445e0306b08cd4a2ae0724e1a69fa2f7aaa8fd851465eda370fade700ee1a0754a65b8078358317f2b9a460eadb2eb338ac8411a449057b478e3c0a008e0987293e5ce118ae05ccbd6837b82de87a617154940bcebb0b88ffe7152700a9f199051c3311ca647ec728aa797d3ae1518f6aa4e348024239b0a5cb78ba00987e426d486756ee2460452ecaa3d1144d5f81412b92e003774763efe158ac004b52b7a96203be266a9b0232cb47ed216a773ff21a241bbabfc22080979fc200aded1bd0615426425652e36f784c92d96db151ec85cb10329135878563adb60099708967a33656729bf44924e051899c3ab3777f03148f5792a2
31d948a9de0007c1a68a51ba08e133d2c4db0577f63870f2430af1828b47113227da2e0d100032b92a06a32098f02854be1a42a786eec2e9fb35a97738caf6dd1d57188d3f007d29afe7f90ed912ae39132ffcb9741b8010d4f0f3292f811d01f34eab298800a7589f2030d5ea72f11ea3aa1327a64c4de1727122a0958b27aa7025bbaace0018739ab139fa2c36ec0f45a50f55f369672e65d092da47c48e56db72808bc1006bdb3cf8163c31b92c81d7e15f7ab6ae1b7740b28f67947924ce24fef45eb30017491d54e8e28719eee3946ad529583de2cb11ac09c8a704ec7335f5280e2800e97cc2e7cf7bb9245b1ae02c345dcb73998be05998b0def5f91c591330e65600b1c8bbc266faca3360d72a5d4a6edefc8c3854452460ba4a034b808c385fa800c7967a86a91e7af51660b410b97d40afa4fec3d49e522a995aa5ae6453663c00d46b84fc4ff1520634609db2201a6434008d91f0f1c73e8aa5e9f34056154b0070cd526d386d82fd155bd669540674f0e65aa05d301e9174d2e104a603eac600d1cb417f39838c4716b079e06ca3321aa7336319a40edc4a4cdfdb767a702d0012d526c29611c8d2c10817e39f4bc29d180ce6", - expectedBlobVersionedHash: "0x01f6f07ae03e8a6ead4384c206ac3d38cd453c1da0516dad7608713bd35bb92d", + expectedBlobEncode: 
"0007000c6601601d1705630094960000c7436aaec2cfaf39d5be02a02c6ac208009ab264c3e0fd142db682f1cc0002000363807b2a1de9000355418d1e818400000263807b2d1a2c0003546c3cbb39e50001646b6e137a120000020001f8718000843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0ca28a00152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf6781e90c00c32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d00c4d001996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a03fb200bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f20bd00e27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68483590096fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a17831197e2008080b915d260806040523480156200001157600080fd5b50604051620014b200380380833981810160405260a08110378151602083015160408085018051910051939592948301929184648211639083019060208201858179825181118282000188101794825250918201929091019080838360005b83c357818101518382000152602001620000a9565b50505050905090810190601f16f1578082038051006001836020036101000a0319168191508051604051939291900115012b0146000175015b01a39081015185519093508592508491620001c891600391850190006200026b565b508051620001de90600490602084506005805461ff001960ff001990911660121716905550600680546001600160a01b0380881619928316170090925560078054928716929091169190911790556200023081620002556201000000600160b01b03191633021790555062000307915050565b60ff191660ff00929092565b828160011615610100020316600290049060005260206000209000601f016020900481019282601f10620002ae5780518380011785de016001010085558215620002de579182015b8202de57825182559160200191906001c156005b50620002ec9291f0565b5090565b5b8002ec5760008155600101620002f100565b61119b80620003176000396000f3fe61001004361061010b576000356000e01c80635c975abb116100a257806395d89b4111610071146103015780639d00c29fac14610309578063a457c2d714610335578063a9059cbb1461036157800063dd62ed3e1461038d5761010b565b1461029d57806370a08231146102a5570080638456cb59146102cb5780638e50817a146102d3313ce567116100de57140061021d578063395093511461023b5780633f4ba83a1461026757806340c10f00191461027106f
dde0314610110578063095ea7b31461018d57806318160ddd00146101cd57806323b872e7575b6101186103bb565b6040805160208082528300518183015283519192839290830161015261013a61017f92505080910390f3005b6101b9600480360360408110156101a381351690602001356104519115150082525190819003602001d561046e60fd81169160208101359091169060407400565b6102256104fb60ff90921640025105046f610552565b005b61026f02870005a956610654d520bb3516610662067d56e90135166106d218610757031f0700b856034b085f77c7d5a308db565b6003805420601f600260001961010060010088161502019095169490940493840181900481028201810190925282815260006093909290918301828280156104475780601f1061041c57610100808354040002835291610447565b825b8154815260200180831161042a57829003601f1600820191565b600061046561045e610906565b848461090a565b506001920254008184f6565b6104f18461048d6104ec8560405180606080602861108560289100398a166000908152600160205260408120906104cb81019190915260400160000020549190610b51565b935460ff160511016000610522908116825260208000830193909352604091820120918c168152925290205490610be8565b60071600331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b1b1bdd00d95960aa1b604482015290640190fd5b6105a7610c49565b610100900460ff0016156105f9106f14185d5cd8589b194e881c185d5cd9596082600606460650008282610ced909052604006ca0ddd900407260c6b6f6e6c7920466163746f72007960a0079283918216179091559390921660041561080808550e65086c251100176025006108968dd491824080832093909416825233831661094f5704018000806020018281038252602401806110f36024913960400191fd821661099422003d60228084166000819487168084529482529182902085905581518581529100517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c700c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a8b83830083610f61565b610ac881265f60268685808220939093559084168152205461000af7908220409490945580905191937fddf252ad1be2c89b69c2b068fc378d00aa952ba7f163c4a11628f55a4df523b3ef9291829003008184841115610be0008381815191508051900ba50b8d0bd2fd900300828201610c421b7f53616665004d6174683a206164646974696f6e206f766572666c6f7700610c9c147362160090557f5db9ee0a495bf2e6ff9c91a
7834c1ba4fdd244a5e8aa4e537bd38aea00e4b073aa610cd0a18216610d481f7f45524332303a206d696e7420746f2074006865207a65726f72657373610d546000600254610d610255902054610d878000838393519293910e2d6101001790557f62e78cea01bee320cd4e420270b5ea0074000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad6021610eb6820060000ef3221b85839020550f199082610fb540805182600091851691912056005b610f6cb07415610fb02a113c602a00610c428383401e7375627472616381005250fe7472616e736665726275726e20616d6f756e742065786365656473200062616c616e6365617070726f7665616c6c6f7766726f6d646563726561736500642062656c6f775061757361626c653a20746f6b656e7768696c652070617500736564a2646970667358221220e96342bec8f6c2bf72815a39998973b64c3b00ed57770f402e9a7b7eeda0265d4c64736f6c634300060c00331c5a77d9fa7e00f466951b2f01f724bca3a5820b63a0e012095745544820636f696e04c001a000235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e1737f0553c63310072a02f7c0384ddd06970446e74229cd96216da62196dc62395bda52095d44b008a9af7df0b80825dc0941a258d17bf244c4df02d40343a7626a9d321e105800080808080814aa8d130a9149a111111110549d2741105c418e61894eb01122000132dcb629c42c818e2c88850202223220549523307f06170f01bb60bc5a52b00d26b2c50bad4e2b2035c47a34038481f5c57890a393e7e010458ce81f09e490072a01121c9ac5b68d650819d732c48a37a11abde6747197969db3cb19efb34000e5599ae00e23d0c1e49e79382d1b7c50c62a112c690260d768f1c65149c24001c46f8708563b416f0f7a0dd98b6e0f685c551512e38ef2e20fe9f579ca4c6006146e0985928fbf8d1a3a8f3fac0c3a31ed348ff91848772604bcff17431fd0084da90ceacd54940452eea0ba22994bea2805624c107ee8ec4dec3a15897b000e2a6aa06fc4bd43eb964968fda4fd4ee7ee46d906ce01a8541022f5f0ab7da0067a5c348265c5652e06f484c92ff6d3158ec85cf1092226b8485c75ab799fc0012c9466daee43ce9132560a312094d536f7803541ca00845e3d84ea9b273c2004c15a17411e1331288b705778e3b7dc7830db1848b47113243da2e0d1012ba002a04a32898f050a8b61a02a7e6dc85eef13556ef249af7dd1d57508d3f7d2900af03f968d912ae39d330fcb9741b8010d4b0f0412f812401338eae5d10ef58001f23b0deea72f11e23aa1325a65d4d19e42222a0ef8b27aa7025bba4688c7900ccd826fa2c1ef64745e50f55e969672665104aed27e28
c7adb52800be035d9003cf8163c31b92c81d7e14f7ab6ae1bf743b28fe7977924ce24fef45db39748001d0ce8628719fec729b5aa534c3df0e711d489ce2708547335ef2802fc757e00c3e7cb7b5bbf2ddd70d69a2ea5b9c345f34c9870d0a9f91cac0933a6a9b1e2005db9667ae51967d0165d126e7efc943894231266bafa034b808cd857a8972c00f5b2a95e79f52ec06822723b406fa7fe93a9fd522a9d5aa548c98ecc78d66b0084fd4f11a50c68c03b65c11af468801b23e9f1937dd4a5e9e38157d596e09a00526d4cda44f62b86afc0a88cfeb0e65aa8dd623c9174dce1044907ecc6b9b700837e720e198f2c60f3b0d946652c4e87c6324e1d5895b0beb72535e48e48f400180fbb8c40200e7ab8387fb224c9ef820e", + expectedBlobVersionedHash: "0x012f5d0b0130addfce5502c7ce3d04945634fa80efd4b996ce71e1f2203ced3f", }, { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03", batch: &Batch{ Index: 3, ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 37, PrevL1MessageQueueHash: common.Hash{}, PostL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, }, - expectedBlobEncode: 
"0007000c4f016005174d62000495000025003d35d6b71c2769de1a4eb8f603e2000f539c53a10c6764a6f5836cf1310002000363807b2a1de9000355418d1e8100840002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e50001000000f87180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceea00cb0ca28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf006781e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce0064d00c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf6800e9a03fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace2008d8f20bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4ab00a684835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1700831197e28080b915d260806040523480156200001157600080fd5b5060405100620014b2380380833981810160405260a0811037815160208301516040808500018051915193959294830192918464018211639083019060208201858179820051811182820188101794825250918201929091019080838360005b83c357810081015183820152602001620000a9565b50505050905090810190601f16f1570080820380516001836020036101000a031916819150805160405193929190010015012b01460175015b01a39081015185519093508592508491620001c891600003918501906200026b565b508051620001de9060049060208450600580546100ff001960ff1990911660121716905550600680546001600160a01b0380881600199283161790925560078054928716929091169190911790556200023081620000025562010000600160b01b03191633021790555062000307915050565b6000ff191660ff929092565b828160011615610100020316600290049060005260002060002090601f016020900481019282601f10620002ae578051838001178500de0160010185558215620002de579182015b8202de5782518255916020019100906001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010001620002f1565b61119b80620003176000396000f3fe61001004361061010b005760003560e01c80635c975abb116100a257806395d89b411161007114610300015780639dc29fac14610309578063a457c2d714610335578063a9059cbb1400610361578063dd62ed3e1461038d5761010b565b1461029d57806370a0823100146102a55780638456cb59146102cb5780638e50817a146102d3313ce56711006100de571461021d578063395093511461023b5780633f4ba83a146102675700806340c10f191
461027106fdde0314610110578063095ea7b31461018d5780006318160ddd146101cd57806323b872e7575b6101186103bb565b6040805160002080825283518183015283519192839290830161015261013a61017f9250500080910390f35b6101b9600480360360408110156101a381351690602001356100045191151582525190819003602001d561046e60fd81169160208101359091001690604074565b6102256104fb60ff90921640025105046f610552565b005b0061026f028705a956610654d520bb3516610662067d56e90135166106d21861000757031f07b856034b085f77c7d5a308db565b6003805420601f600260001900610100600188161502019095169490940493840181900481028201810190920052828152606093909290918301828280156104475780601f1061041c57610100008083540402835291610447565b825b8154815260200180831161042a5782009003601f16820191565b600061046561045e610906565b848461090a565b500060019202548184f6565b6104f18461048d6104ec8560405180606080602861001085602891398a166000908152600160205260408120906104cb81019190910052604001600020549190610b51565b935460ff160511016000610522908116008252602080830193909352604091820120918c168152925290205490610be800565b600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd0800185b1b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010000900460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600006064606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046006163746f727960a0079283918216179091559390921660041561080808550e0065086c2511176025006108968dd49182408083209390941682523383166109004f57040180806020018281038252602401806110f36024913960400191fd820016610994223d60228084166000819487168084529482529182902085905581005185815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e005b200ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff8602300610a8b838383610f61565b610ac881265f60268685808220939093559084160081522054610af7908220409490945580905191937fddf252ad1be2c89b69c200b068fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484001115610be08381815191508051900ba50b8d0bd2fd900300828201610c421b007f536166654d6174683a206164646974696f6e206f766572666c6f7700610c009c1473621690557f5db9ee0a495bf
2e6ff9c91a7834c1ba4fdd244a5e8aa4e00537bd38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e740020746f20746865207a65726f72657373610d546000600254610d61025590200054610d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e00420270b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad600021610eb68260000ef3221b85839020550f199082610fb540805182600091850016919120565b610f6cb07415610fb02a113c602a00610c428383401e7375620074726163815250fe7472616e736665726275726e20616d6f756e742065786300656564732062616c616e6365617070726f7665616c6c6f7766726f6d646563007265617365642062656c6f775061757361626c653a20746f6b656e7768696c006520706175736564a2646970667358221220e96342bec8f6c2bf72815a3999008973b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c00330000001c5a77d9fa7ef466951b2f01f724bca3a5820b63a0e01209574554482063006f696e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e001737f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc6230095bda52095d44b8a9af7814ba8c130a9143223222222122449930e1105c41800e61894eb0112401329cb622042c818e2c888502022330a0a92a4660ef030c800d41b020bd54b2b52d740a07429c5650a708c469770741f38578d8a1c181f160002fcce819dced01cd046489a753bcd9a5460c71c9ed2a85af4562fbba326bf00b460ee14ce7d8f435fa60f80784f8217f378a760e2db08177bbf8461a44971005b468e2a0a4e0987d33d5cf718d505447668371e2db86dd9425494169c23170010ffcb2b4e52e320a370cc6c28fbf4d1a0a8dbfa28c5a316d548c791841fe50040919ef57441fd956d4883596d9d80325c644c9b29277de58e5624800fd81d00edbe8743b12ea14599aa08f8b0a825cace2c1db5bfa87dfdc8a3916c7e25800041282fbf875bcf37b8c348265c565259bf213a49368cc5a0b617de4348dfac001d16fab58e66929018206a034fce4b9f08023628916c86d48a23408b99c73e001a87f64a4d9d1346aa28a58b089f91f2bcd1ba73dc693b18168865563c3291003965bbd440c8b455498c62cc84d342c5d4102035b72e6cdf5ab378c7e624de00dd778ff8f15b8cedf5c9dfc15bc2b59cc912fe5c1a0d40626a18f8e84ba04200c05c72c688c5cbaccf10d84545b9781ba3dd893ca6604de1717123a0158ae700aa7079bb74d102f3ecb18df95908ec8f924a32aa4cdeceeccc603ada6f8821005a68cb001713afcdf0e02d78a0e410040e097fd1b31d6
cdc51caae3eefe6ee003993c2a7af90bde0d4c14f17cfcc70e64e29a99d52ee51fc8b4604f43d41a600eaab794791e14ff27b3fc7de5bfa69e9deb3cd7477e9c22e1acde884264d2500e7089aa03137827df80af1e9d2cfb08c8dbb14a2f9c153cc5036495074f55b0096a019f15da8c7957a46a9de77f51860b425b91d40af7cff31df5e5d2aea5b00a5f065b3677cd66bc4a64f115d6e39e0f0b2a01b943280a2919cf06339caba00e944405dd582e0da526dbedf64c5ab86bb80a998beb1bc5a48dd793cba742000e104490784c7b9b7833eee8418b88ccaf441e4370b38239db665ec3b5055990012c1b67be8dd1ae1c0b61e0a64bca0b78bc73730ef4b0d065e", - expectedBlobVersionedHash: "0x019122a14fdcf36ccab4f507a6d9d45f3a1d17479e108e05ca4b27341b2da98f", + expectedBlobEncode: "0007000c4d0160fd163d6200e49400003d35d6b71c2769de1a4eb8f603e20f53009c53a10c6764a6f5836cf1310002000363807b2a1de9000355418d1e8184000002646b6ed07a12000005000563807b2d1a2c0003546c3cbb39e500010000f8007180843b9aec2e8307a12094c0c4c8baea3f6acb49b6e1fb9e2adeceeacb0c00a28a152d02c7e14af60000008083019ecea0ab07ae99c67aa78e7ba5cf678100e90cc32b219b1de102513d56548a41e86df514a034cbd19feacd73e8ce64d0000c4d1996b9b5243c578fd7f51bfaec288bbaf42a8bf8710101bae6bf68e9a0003fb2bc0615b1bf0d69ce9411edf039985866d8256f10c1be4f7b2cace28d8f0020bde27e2604393eb095b7f77316a05a3e6e81065f2b4604bcec5bd4aba68400835996fc3f879380aac1c09c6eed32f102f9162d82cf5502843b9b0a1783110097e28080b915d260806040523480156200001157600080fd5b5060405162000014b2380380833981810160405260a0811037815160208301516040808501800051915193959294830192918464018211639083019060208201858179825181001182820188101794825250918201929091019080838360005b83c357818101005183820152602001620000a9565b50505050905090810190601f16f1578082000380516001836020036101000a031916819150805160405193929190011501002b01460175015b01a39081015185519093508592508491620001c891600391008501906200026b565b508051620001de90600490602084506005805461ff00001960ff1990911660121716905550600680546001600160a01b0380881619920083161790925560078054928716929091169190911790556200023081620002005562010000600160b01b03191633021790555062000307915050565b60ff19001660ff929092
565b828160011615610100020316600290049060005260206000002090601f016020900481019282601f10620002ae5780518380011785de010060010185558215620002de579182015b8202de5782518255916020019190600001c1565b50620002ec9291f0565b5090565b5b8002ec576000815560010162000002f1565b61119b80620003176000396000f3fe61001004361061010b576000003560e01c80635c975abb116100a257806395d89b411161007114610301570080639dc29fac14610309578063a457c2d714610335578063a9059cbb1461030061578063dd62ed3e1461038d5761010b565b1461029d57806370a0823114610002a55780638456cb59146102cb5780638e50817a146102d3313ce56711610000de571461021d578063395093511461023b5780633f4ba83a146102675780630040c10f191461027106fdde0314610110578063095ea7b31461018d5780631800160ddd146101cd57806323b872e7575b6101186103bb565b6040805160208000825283518183015283519192839290830161015261013a61017f9250508091000390f35b6101b9600480360360408110156101a381351690602001356104510091151582525190819003602001d561046e60fd81169160208101359091169000604074565b6102256104fb60ff90921640025105046f610552565b005b6102006f028705a956610654d520bb3516610662067d56e90135166106d21861075700031f07b856034b085f77c7d5a308db565b6003805420601f600260001961010000600188161502019095169490940493840181900481028201810190925282008152606093909290918301828280156104475780601f1061041c57610100800083540402835291610447565b825b8154815260200180831161042a5782900300601f16820191565b600061046561045e610906565b848461090a565b506001009202548184f6565b6104f18461048d6104ec8560405180606080602861108500602891398a166000908152600160205260408120906104cb81019190915260004001600020549190610b51565b935460ff160511016000610522908116825200602080830193909352604091820120918c168152925290205490610be8565b00600716331461059f5762461bcd60e51b60040b60248201526a1b9bdd08185b001b1bddd95960aa1b604482015290640190fd5b6105a7610c49565b61010090000460ff16156105f9106f14185d5cd8589b194e881c185d5cd9596082600606004606508282610ced909052604006ca0ddd900407260c6b6f6e6c792046616300746f727960a0079283918216179091559390921660041561080808550e6508006c2511176025006108968dd49182
4080832093909416825233831661094f5700040180806020018281038252602401806110f36024913960400191fd821661000994223d60228084166000819487168084529482529182902085905581518500815291517f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b20000ac8c7c3b92592819003a3508316610a3b25ce8216610a80230ff86023610a008b838383610f61565b610ac881265f60268685808220939093559084168152002054610af7908220409490945580905191937fddf252ad1be2c89b69c2b06800fc378daa952ba7f163c4a11628f55a4df523b3ef929182900300818484111500610be08381815191508051900ba50b8d0bd2fd900300828201610c421b7f53006166654d6174683a206164646974696f6e206f766572666c6f7700610c9c140073621690557f5db9ee0a495bf2e6ff9c91a7834c1ba4fdd244a5e8aa4e537b00d38aeae4b073aa610cd0a18216610d481f7f45524332303a206d696e742074006f20746865207a65726f72657373610d546000600254610d61025590205461000d8780838393519293910e2d6101001790557f62e78cea01bee320cd4e42020070b5ea74000d11b0c9f74754ebdbfc544b05a2588216610eaa6021ad602161000eb68260000ef3221b85839020550f199082610fb540805182600091851691009120565b610f6cb07415610fb02a113c602a00610c428383401e7375627472006163815250fe7472616e736665726275726e20616d6f756e742065786365650064732062616c616e6365617070726f7665616c6c6f7766726f6d646563726500617365642062656c6f775061757361626c653a20746f6b656e7768696c652000706175736564a2646970667358221220e96342bec8f6c2bf72815a3999897300b64c3bed57770f402e9a7b7eeda0265d4c64736f6c634300060c003300001c005a77d9fa7ef466951b2f01f724bca3a5820b63a0e012095745544820636f69006e04c001a0235c1a8d40e8c347890397f1a92e6eadbd6422cf7c210e3e173700f0553c633172a02f7c0384ddd06970446e74229cd96216da62196dc62395bd00a52095d44b8a9af7814aa8c130a9143223222222122449930e1105c418e6180094eb0112200329cb6220859031c49111a1404466440a92a4660ef06170a037000416b7372a8c6e8610e806c5190aca8c469770741f3857ad8a9c183f16023c00ce819dcea439088d90346b7d9a15a9c08c3943a5b9b68857ffb9a320bf34c2005c209c3b3e0ebd4cbf01f19e046f26314fc1c4b7112ef67e09034893e2368c001c51141c130ef37bb8ee31aa0bf8ecd36e18b420a265035131bd73402ec0fd009157a4a7c65bb6e198d950e6e8a34151b7f5518a472d
6623dd20097c94a32200ddd1d39ffa49b6e106b38e3b0167b8883163a684f495285ad1013e6477ccfb001e8e635d765b325513f0d7a2d6889d5977d4e6a276f4239f46b2f9cd0006a100bc7c1e6e359fe80ea39fd8594965fd86ef24613116ebda5e580f2175b3665800e8af559a89410280a84d3fb92d7d4a10b0a244bf186a0b8f0065661efb681c009a2bf5344000a982942e227c46caf376d69d7b8edac418405c59f18089cc9100edf20642c6ad8a63943113fe16eaa48610a99975e1f9d79ac13b3f27f1eebe00e9b88fdf62b6d75b7f346f09d739533afcb9341a80c4d4f0f0d197408580b900c48d118b87591f466003d5e5626f8c7027764cc99a02c7454640df14cf555100f376fd420accb9c756f3b300b03f2595cda88abc9df93383f168df218ea8a3002d015c205e13c383b7e0919247101825fc43cf76b071975276fb0c378fe74c00023ebd85ec65a70e46ba3866863b774a49ed14bac7e759187ca07bb2cdcb5700938e8243b6e4eb7eeebd9f34d7d2ba6795e9c285bbbb689aa9130eb49b9c230068f2c6dc08d6e22bc4a7473f9322f6ef928de6074f3143d52401d1556b59c20066c44fa19eadd403a57aa2abd700a32dc96d007ae1fb0ff9b6ea5201df2ad5005f763ce3ad5e23307d8ad2659503262f0bbe412f032c1a49849f96a3d64d5f0004dc552d08ae29d5c6f84d64bc6a920b988a5119d8ab05a98bc77b97ce249c00d07440f0b8f53ce8e74e8a95cbb05c1f88fca60135d2d9b68c7c07922a2d11006c6f77bc58631eacad8402292db87611f103ccd5de4517", + expectedBlobVersionedHash: "0x017407549060b08106683c1c986178635b49d8b82a6600a3a52ff1c147ba22a3", }, // test error cases - { - name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03, but with wrong initialL1MessageIndex", - batch: &Batch{ - Index: 3, - ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 21, - PrevL1MessageQueueHash: common.Hash{}, - PostL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), - Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, - }, - creationErr: "failed to sanity check L1 messages count", - }, { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages 
only) + 03, but with wrong (not consecutive) block number", batch: &Batch{ Index: 3, ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 21, PrevL1MessageQueueHash: common.Hash{}, PostL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_05.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, @@ -303,7 +284,6 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { batch: &Batch{ Index: 3, ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 37, PrevL1MessageQueueHash: common.Hash{1}, PostL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, @@ -315,7 +295,6 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { batch: &Batch{ Index: 3, ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 9, PrevL1MessageQueueHash: common.Hash{1}, PostL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 4)}, @@ -327,7 +306,6 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { batch: &Batch{ Index: 3, ParentBatchHash: common.Hash{2}, - InitialL1MessageIndex: 9, PrevL1MessageQueueHash: common.Hash{1}, PostL1MessageQueueHash: common.HexToHash("0xfaa13a9ed8937474556dd2ea36be845199e823322cd63279a3ba300000000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, 
"testdata/blockTrace_06.json"), 3)}, @@ -354,7 +332,6 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { blobPayload, err := codecV7.DecodeBlob(daBatch.Blob()) require.NoError(t, err) - require.Equal(t, tc.batch.InitialL1MessageIndex, blobPayload.InitialL1MessageIndex()) require.Equal(t, tc.batch.PrevL1MessageQueueHash, blobPayload.PrevL1MessageQueueHash()) require.Equal(t, tc.batch.PostL1MessageQueueHash, blobPayload.PostL1MessageQueueHash()) @@ -362,7 +339,7 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { require.Equal(t, len(tc.batch.Blocks), len(blobPayload.Blocks())) decodedBlocks := blobPayload.Blocks() for i, block := range tc.batch.Blocks { - numL1Messages, _, err := block.NumL1MessagesNoSkipping() + numL1Messages, _, _, err := block.NumL1MessagesNoSkipping() require.NoError(t, err) daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(block.NumL2Transactions())+numL1Messages, numL1Messages) @@ -416,10 +393,10 @@ func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { } // Taking into consideration compression, we allow up to 5x of max blob bytes minus 5 byte for the blob envelope header. - // We subtract 82 bytes for the blobPayloadV7 metadata. - //compressableAvailableBytes := maxEffectiveBlobBytes*5 - 5 - 82 - maxAvailableBytesCompressable := 5*maxEffectiveBlobBytes - 5 - 82 - maxAvailableBytesIncompressable := maxEffectiveBlobBytes - 5 - 82 + // We subtract 74 bytes for the blobPayloadV7 metadata. + //compressableAvailableBytes := maxEffectiveBlobBytes*5 - 5 - blobPayloadV7MinEncodedLength + maxAvailableBytesCompressable := 5*maxEffectiveBlobBytes - 5 - blobPayloadV7MinEncodedLength + maxAvailableBytesIncompressable := maxEffectiveBlobBytes - 5 - blobPayloadV7MinEncodedLength // 52 bytes for each block as per daBlockV7 encoding. 
bytesPerBlock := 52 @@ -434,61 +411,61 @@ func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { { name: "no blocks", txData: []string{}, - expectedBlobVersionedHash: "0x01c3d5ebe49678dcde7aa2e90b6bd451a11c2718b40aa739aa5f626550435389", + expectedBlobVersionedHash: "0x018ea63fc2caaef749cedbeb0d890c006692a5507bb184817483bd5067e432b9", }, { name: "single block, single tx", numBlocks: 1, txData: []string{"0x010203"}, - expectedBlobVersionedHash: "0x013b5be233a9a3ef576049b3dbd81b71f62ca2c99fde0e74dfbed59ba0e45bd2", + expectedBlobVersionedHash: "0x01982a2d4020291908a5370531ce3c4b011d3ee1bcf83219635d11f8a943395b", }, { name: "single block, multiple tx", numBlocks: 1, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x016591dd97004a0bfd84efee01dd5cb10c477e4300f34dedf428d2cd154fc69d", + expectedBlobVersionedHash: "0x0178762564f254b45524a759b5a051315aa71bdd3479aa63733ad377e6ff711a", }, { name: "multiple blocks, single tx per block", numBlocks: 3, txData: []string{"0x010203"}, - expectedBlobVersionedHash: "0x01890ba0b9db428ca5545d1a58e5ba7735f92395e3dd7811ca1f652280bb1d3f", + expectedBlobVersionedHash: "0x011efa14a395ed7bfdc20d501d60230a10ef88fda6988d22d7a67426ba0eb5a0", }, { name: "multiple blocks, multiple tx per block", numBlocks: 3, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x014a47d175874f5b10d95deabe0a3b10ea2bdbc5080ea33b9f1a16a4d3c7395f", + expectedBlobVersionedHash: "0x01b9f4e80407f7a730235ae2268cdbf3cdb68b30adda6a557d2382f5777c73f3", }, { name: "thousands of blocks, multiple tx per block", numBlocks: 10000, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x013e0d8453800705d2addbb1e1b18a32e4f122c1796118e332c12b76ac94f981", + expectedBlobVersionedHash: "0x010d6f1499e0ac277e9413f4e27f849ad0a57d9889dbd17a060c3000c4e50bd2", }, { name: "single block, single tx, full blob random data -> data bigger compressed than uncompressed", numBlocks: 1, 
txData: []string{generateRandomData(maxAvailableBytesIncompressable - bytesPerBlock)}, - expectedBlobVersionedHash: "0x0116f6c465152096ad21177c0a3f418342550e5c87a64636a900ac53d6737db8", + expectedBlobVersionedHash: "0x01f201477ef7c9bd1e48f66ea60e6e0798dca8651900269f6e24b484587b821d", }, { name: "2 blocks, single tx, full blob random data", numBlocks: 2, txData: []string{generateRandomData(maxAvailableBytesIncompressable/2 - bytesPerBlock*2)}, - expectedBlobVersionedHash: "0x01b1c7f234b9f42f09e950d60f9dbf6f5811f0a9abdb85a4a954e731a9ff56d7", + expectedBlobVersionedHash: "0x017d7f0d569464b5c74175679e5f2bc880fcf5966c3e1928c9675c942b5274f0", }, { name: "single block, single tx, full blob repeat data", numBlocks: 1, txData: []string{repeat(0x12, maxAvailableBytesCompressable-bytesPerBlock)}, - expectedBlobVersionedHash: "0x01ce5ed50a28906dd5f1556f6da913c24b6637a1d1aa6ff53d0abfb078e1ac44", + expectedBlobVersionedHash: "0x01f5d7bbfe7deb429bcbdd7347606359bca75cb93b9198e8f089b82e45f92b43", }, { name: "2 blocks, single 2, full blob random data", numBlocks: 2, txData: []string{repeat(0x12, maxAvailableBytesCompressable/2-bytesPerBlock*2), repeat(0x13, maxAvailableBytesCompressable/2-bytesPerBlock*2)}, - expectedBlobVersionedHash: "0x01af3e8f72659c3e4bb6193fe8acc6548589f1a887a0a26ea56fdcae2ac62f81", + expectedBlobVersionedHash: "0x01dccca3859640c50e0058fd42eaf14f942070e6497a4e2ba507b4546280a772", }, { name: "single block, single tx, full blob random data -> error because 1 byte too big", @@ -562,8 +539,8 @@ func TestCodecV7BatchStandardTestCasesDisableCompression(t *testing.T) { } // No compression. max blob bytes minus 5 byte for the blob envelope header. - // We subtract 82 bytes for the blobPayloadV7 metadata. - maxAvailableBytes := maxEffectiveBlobBytes - 5 - 82 + // We subtract 74 bytes for the blobPayloadV7 metadata. + maxAvailableBytes := maxEffectiveBlobBytes - 5 - blobPayloadV7MinEncodedLength // 52 bytes for each block as per daBlockV7 encoding. 
bytesPerBlock := 52 @@ -578,31 +555,31 @@ func TestCodecV7BatchStandardTestCasesDisableCompression(t *testing.T) { { name: "no blocks", txData: []string{}, - expectedBlobVersionedHash: "0x01a821a71e2f0e7409d257c2b070cd4626825a6de5a2e3eda0099c21c8b16bd9", + expectedBlobVersionedHash: "0x0127467f5062c887d10c72713d76406ef5caebe2df5b1b679a1b5cd812cf395b", }, { name: "single block, single tx", numBlocks: 1, txData: []string{"0x010203"}, - expectedBlobVersionedHash: "0x019ed4e5a68c7da4141a94887837d7a405285d2aaedf9701ad98fe7c27af48eb", + expectedBlobVersionedHash: "0x01752838099db7811eea826eaf2c4a2ea2ffd832fb4e4e981243112a6e94f3ce", }, { name: "single block, multiple tx", numBlocks: 1, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x01943ef319ee733ebbd63e5facf430aa02c0b7da1f3c9eb7e2cb98b8ff63aa04", + expectedBlobVersionedHash: "0x01d242d36f0dea017320aa36dcc565d0a11708c9521f95027bd59813b1a455ec", }, { name: "multiple blocks, single tx per block", numBlocks: 3, txData: []string{"0x010203"}, - expectedBlobVersionedHash: "0x013182a3b34bf4a390f8d74d35e922c4e116c45872da8b6f69661510d33736d8", + expectedBlobVersionedHash: "0x015e10ec939109061216dd6cf61551eb443a3e75ef43d97334c5b2ee52c47148", }, { name: "multiple blocks, multiple tx per block", numBlocks: 3, txData: []string{"0x010203", "0x040506", "0x070809"}, - expectedBlobVersionedHash: "0x018077923a1617eae61bb6f296124f937656e9ab0852ce577e8b0f066207fe7e", + expectedBlobVersionedHash: "0x01877eaa8ef364fca0ab2df8b1b30435228436ef6e34ee5abefed2a8de384a78", }, { name: "thousands of blocks, multiple tx per block -> too big error", @@ -614,25 +591,25 @@ func TestCodecV7BatchStandardTestCasesDisableCompression(t *testing.T) { name: "single block, single tx, full blob random data", numBlocks: 1, txData: []string{generateRandomData(maxAvailableBytes - bytesPerBlock)}, - expectedBlobVersionedHash: "0x0116f6c465152096ad21177c0a3f418342550e5c87a64636a900ac53d6737db8", + expectedBlobVersionedHash: 
"0x01f201477ef7c9bd1e48f66ea60e6e0798dca8651900269f6e24b484587b821d", }, { name: "2 blocks, single tx, full blob random data", numBlocks: 2, txData: []string{generateRandomData(maxAvailableBytes/2 - bytesPerBlock*2)}, - expectedBlobVersionedHash: "0x0123aa955d8c0bbc0baca398d017b316dcb5a7716fe0517a3dee563512f67584", + expectedBlobVersionedHash: "0x01ae4b29190bcbb86e9b0100cd456e4119a3eb991bd8c7215d6f7471883290a2", }, { name: "single block, single tx, full blob repeat data", numBlocks: 1, txData: []string{repeat(0x12, maxAvailableBytes-bytesPerBlock)}, - expectedBlobVersionedHash: "0x019fff94371bb8986d294a036268f6121257cefa6b520f383e327e0dc5a02d9c", + expectedBlobVersionedHash: "0x011e1d9e8f14453d4b2a73edcd962d4ccaf54580069bc636c59de87a80800a2f", }, { name: "2 blocks, 2 tx, full blob random data", numBlocks: 2, txData: []string{repeat(0x12, maxAvailableBytes/4-bytesPerBlock*2), repeat(0x13, maxAvailableBytes/4-bytesPerBlock*2)}, - expectedBlobVersionedHash: "0x01c9a49d50a70ad2aba13c199531fa40d43a909a65d9f19dd565be7259b415ed", + expectedBlobVersionedHash: "0x01148a71a69e6d2d00562397d2e1938dc2634f153a6ee37122bfd70cff676aaf", }, { name: "single block, single tx, full blob random data -> error because 1 byte too big", @@ -702,7 +679,6 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { { name: "Single Block 04", batch: &Batch{ - InitialL1MessageIndex: 10, PostL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_04.json")}, }, @@ -711,7 +687,6 @@ func TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { { name: "Single Block 05, only L1 messages", batch: &Batch{ - InitialL1MessageIndex: 37, PostL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_05.json")}, }, @@ -736,7 +711,6 @@ func 
TestCodecV7BatchCompressedDataCompatibilityCheck(t *testing.T) { { name: "Multiple Blocks 02, 03, 04", batch: &Batch{ - InitialL1MessageIndex: 10, PostL1MessageQueueHash: common.HexToHash("0xc7436aaec2cfaf39d5be02a02c6ac2089ab264c3e0fd142db682f1cc00000000"), Blocks: []*Block{ readBlockFromJSON(t, "testdata/blockTrace_02.json"), @@ -844,72 +818,66 @@ func TestCodecV7BatchBlobDataProofForPointEvaluation(t *testing.T) { batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 0, PrevL1MessageQueueHash: common.Hash{}, PostL1MessageQueueHash: common.Hash{}, Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json")}, }, - expectedBlobDataProof: "2781a7620cf0cecb23596d7fee625cc26c61b7d605eb2ff43c7ff98fa4a8f3152bbf1b44bb80d37bdd2a352d25e88ea27377cfc3dd7a818f17fe397942dc6400901bb682fa2d91cca8005e181302e0f6e87553579a2d4b16b94e911f7c8b9703492d84fc5765212bc1c1796583e2b86aac6f758bf87fc1a1055c92e03d7217522e31f337255a63fa2b9573714b1e2af4b5e9ce3ab7c2b93a1acc637663435ef5", + expectedBlobDataProof: "0a8939c8acbd2bc2fb3ffd61624e55ebe6d0e000958d7505df6863c4062438414cf197faff537d1549b333f4f5d28a1f26123b723c316862e0f285193accead8949b925113ca4f9a8de59f234af023e4da3892e02dd786092699f15bdce7f3be248a075a1f40d82b86e65895b38693b68b08960479a11237c6699777fc97cf53c10f6503a6a8c0ad8eb35b68d6b051506b20ea3a8f41c3058a366c71fb7c1790", }, { name: "Batch with 1 block, blocktrace 03", batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 0, PrevL1MessageQueueHash: common.Hash{}, PostL1MessageQueueHash: common.Hash{}, Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_03.json")}, }, - expectedBlobDataProof: "42e370c6467ad79b5f7d79050685752fc043d8ff03505aae427bb35c6950d9e4475d546b7166b79e5faee7dd779585846e43de90c67201762ea7f3b6ca9b965b885613949b4e3624a6f8d9f4ec8e03f97d6ad2b3d4ca3462928907ae63189302c75e39974ec2c85e29911b8c3191af2b928bfea24d5c235a4ab0bfd66b6892b0b5a88e4b2cbd7e8dea48fae0a90ed84297dc6149954a2a7245fc5b9f5a258663", + 
expectedBlobDataProof: "05a0e06b0cc573a726a3a3a48ee7b7014480968bd4ec9848effb7d0af33d4127589fc8cc458c673e174455d68d2c2c31847ad09b8805deb61cbef48505a34d88841ff44ffeeb9dc073ef133be9a34cc796babdfbd2f4d5785faf18b96558918e1fe5193d78e2611acf4671888a01a0fc89dde18bef6ab54c7af95df8e3016f0c930ca5f4967de08c6b20c52005acf1dc248eace2ff0a98a89c840bfe15b1594e", }, { name: "Batch with 1 block, blocktrace 04", batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{1, 2, 3, 4}, - InitialL1MessageIndex: 10, PrevL1MessageQueueHash: common.Hash{1, 2, 3, 4}, PostL1MessageQueueHash: common.HexToHash("0x6250cf03e7f922eefe450e9d4234ec56a1502066cd55eff22939df6100000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_04.json")}, }, - expectedBlobDataProof: "43c83acecf2100a74f1cce1a7a62101af22a744f1d61aca5bae8a6bd81a0d2040116ade6d71ec98b6208cfe96c1241c092018506893f4d652a43febcc11a1f2dad8549493363f782fd8893ba193e05498e85d7e0cec10b53ff4e7b53e06659d0209a12b663e3807541c3a4ec6ac0561ea44941243065b683efedfe91c2f84cc90ab5251646d6f929899bb6ce74b0320eb22c31bfe460659b1191c99bfc7afdd6", + expectedBlobDataProof: "1c8917a0f90db3a2370fd18528d1cc9146340ef5cab7511786e212685c0ecfb656d871474ea7fd56a454b4042222240bf4b2fa15ab651cf0cd0b2bed9a9c9271ab3f7d6468190f56f55aca9802683ee6b9cada6fead43bb3cedbb132bcf08a27fcff326a0bb8599a89a57facbbcb49f5a8fa213e77c56332f996e020fed17cf2e607d015b997a9ad1cb993efff674cd8810c00a7539a771feb6fb5b2d41c2512", }, { name: "Batch with 1 block, blocktrace 05", batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 37, PrevL1MessageQueueHash: common.Hash{5, 6, 7, 8}, PostL1MessageQueueHash: common.HexToHash("0xc31c3ca9a880b80c4e7fcb88844a5e21433bd2801bdd504e1ca4aed900000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_05.json")}, }, - expectedBlobDataProof: 
"533782971f05c7a9eb627dc74614984f0c25bea4d2bf5a211360b51b9301dcd327587f1945a25f9063cab01372d4609430c193b66e60a34afe41a5ff341b4673a526889dd5d4c35affcfb513c910d8868deecc52fc40db17ce1eb67b0c1152d56c02dbe5b0f9eb7401649e59d8af6bb7ac69a24a5e2d06ca4ec8b927d0c9b7ceb9e6ef8f71edfa1d5135c183884c88a9d04ae993f006315e5318bb67c15c3b89", + expectedBlobDataProof: "21c2fc4f348de240738bec7591ef72586db52feb7fca79f4d86c87e2b68efa9f1a3bf56b3991eb2e31347054ff227759779acec5ff78c3285c4abb09f2e785bd8d724b0c40745df1e30d6609899b63d88015110bd0f7ca4c9bee0dda327f8ce038e8d0b1179838086799d3c33ce31766afcf23fb52de7757c16a7766f2dc20179d832614bb070431ad5b90fe5b393d34423bf3291373b6072e05c46bc519a752", }, { name: "Batch with 3 blocks, blocktrace 02 + 03 + 04", batch: &Batch{ Index: 1, ParentBatchHash: common.Hash{}, - InitialL1MessageIndex: 10, PrevL1MessageQueueHash: common.Hash{9, 10, 11}, PostL1MessageQueueHash: common.HexToHash("0x20f1c72064552d63fb7e1352b7815a9f8231a028220bf63d27b24bec00000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), readBlockFromJSON(t, "testdata/blockTrace_03.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_04.json"), 4)}, }, - expectedBlobDataProof: "009064652841cb148bc7516a4e5835e8ecc0e1e6e11c9b57378ed90e74e845056dde6e9763ffeab8fc5c8cfcbb33a23a80558429f38cb0e8ef9e2a8c62718b1aa9068ee04e998fdec84a3d6681b70696ccd99dd0ab20cfea19e52d91de68b4f73a0da2ceeb1c64131c4a20b9de632d188fe355ae8a9ab57e3bf8792a99a605b088abbfb656cca16758cf301c7863140b3578867cb03bb42956462808e7c72171", + expectedBlobDataProof: "0b2f1a222f892d9114f3218ce3e5d1a7ba5f043960eff378250e1fa8d649bd076f7ff992b3f030a568543585a9d20bd8ede981dc6901ece26e273b1217da07f4852da1ea424859a212ac35d7d2262ca380c4bc017b20a01b00786a580916b48e763e3ae5c59eeac4d121db442efc7763b3dca263a31bdb7f27ab0a59e8d80566120c8a8d92e4b22efeed5b1863349da44c5103b1420c45598a74cd7cc8d788df", }, { name: "Batch with 3 blocks, blocktrace 02 + 05 (L1 messages only) + 03", batch: &Batch{ Index: 3, ParentBatchHash: 
common.Hash{2}, - InitialL1MessageIndex: 37, PrevL1MessageQueueHash: common.Hash{}, PostL1MessageQueueHash: common.HexToHash("0x3d35d6b71c2769de1a4eb8f603e20f539c53a10c6764a6f5836cf13100000000"), Blocks: []*Block{readBlockFromJSON(t, "testdata/blockTrace_02.json"), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_05.json"), 3), replaceBlockNumber(readBlockFromJSON(t, "testdata/blockTrace_03.json"), 4)}, }, - expectedBlobDataProof: "5e52251386a249e9f8e6c458125fe3347d2358190ee03aa81b0d37128521a75e10c7f2f975cb37f42098a1d173feeaa867da04ecffbdcb6459c5d492a5b0ff048014b94261a8c345d86762e5a96d7f461083d34533175e30ec4ac5ab6cb7360c092822225fd9e5522be341b5f7ad88229394ef2568cd55a8dc60ec62ba818843d8acd83d0642203a19931fea4242cca9ec277b9ae16709b23d65376b85971e2f", + expectedBlobDataProof: "04ca4fb500d52948a622671911cdfc4856b5d169a0a0aed5ff19dc2be2a4eb7f4665316bafd3bf33b8e1df624dbfbb1df762aa65a41c880d38b4e7d734a098c6a3e23c97184774ae69247dbec30060787f1ba97472bb41184b768d9180e860fc4ee91770a4236f224f01dcffb443c259a273b07de848a5db106f6fa7558e26011637c0851e047db4f12c26132d8a0355a3745f34b53ceadb6eb5f368d9ddfef0", }, } diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index af986ab..e8cfa44 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -47,24 +47,22 @@ const ( // Below is the encoding for blobPayloadV7. 
// * Field Bytes Type Index Comments -// * initialL1MessageIndex 8 uint64 0 Queue index of the first L1 message contained in this batch -// * prevL1MessageQueueHash 32 bytes32 8 hash of the L1 message queue at the end of previous batch -// * postL1MessageQueueHash 32 bytes32 40 hash of the L1 message queue at the end of this batch -// * initialL2BlockNumber 8 uint64 72 The initial L2 block number in this batch -// * numBlocks 2 uint16 80 The number of blocks in this batch -// * block[0] 52 BlockContextV2 82 The first block in this batch -// * block[i] 52 BlockContextV2 82+52*i The (i+1)th block in this batch -// * block[n-1] 52 BlockContextV2 82+52*(n-1) The last block in this batch -// * l2Transactions dynamic bytes 82+52*n L2 transactions for this batch +// * prevL1MessageQueueHash 32 bytes32 0 hash of the L1 message queue at the end of previous batch +// * postL1MessageQueueHash 32 bytes32 32 hash of the L1 message queue at the end of this batch +// * initialL2BlockNumber 8 uint64 64 The initial L2 block number in this batch +// * numBlocks 2 uint16 72 The number of blocks in this batch +// * block[0] 52 BlockContextV2 74 The first block in this batch +// * block[i] 52 BlockContextV2 74+52*i The (i+1)th block in this batch +// * block[n-1] 52 BlockContextV2 74+52*(n-1) The last block in this batch +// * l2Transactions dynamic bytes 74+52*n L2 transactions for this batch const ( - blobPayloadV7MinEncodedLength = 8 + 2*common.HashLength + 8 + 2 - blobPayloadV7OffsetInitialL1MessageIndex = 0 - blobPayloadV7OffsetPrevL1MessageQueue = 8 - blobPayloadV7OffsetPostL1MessageQueue = 40 - blobPayloadV7OffsetInitialL2BlockNumber = 72 - blobPayloadV7OffsetNumBlocks = 80 - blobPayloadV7OffsetBlocks = 82 + blobPayloadV7MinEncodedLength = 2*common.HashLength + 8 + 2 + blobPayloadV7OffsetPrevL1MessageQueue = 0 + blobPayloadV7OffsetPostL1MessageQueue = 32 + blobPayloadV7OffsetInitialL2BlockNumber = 64 + blobPayloadV7OffsetNumBlocks = 72 + blobPayloadV7OffsetBlocks = 74 ) // Below 
is the encoding for DABlockV7, total 52 bytes. @@ -210,7 +208,6 @@ func (b *daBatchV7) DataHash() common.Hash { } type blobPayloadV7 struct { - initialL1MessageIndex uint64 prevL1MessageQueueHash common.Hash postL1MessageQueueHash common.Hash @@ -222,9 +219,6 @@ type blobPayloadV7 struct { l2Transactions []types.Transactions } -func (b *blobPayloadV7) InitialL1MessageIndex() uint64 { - return b.initialL1MessageIndex -} func (b *blobPayloadV7) PrevL1MessageQueueHash() common.Hash { return b.prevL1MessageQueueHash } @@ -244,7 +238,6 @@ func (b *blobPayloadV7) Transactions() []types.Transactions { func (b *blobPayloadV7) Encode() ([]byte, error) { payloadBytes := make([]byte, blobPayloadV7MinEncodedLength) - binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetPrevL1MessageQueue], b.initialL1MessageIndex) copy(payloadBytes[blobPayloadV7OffsetPrevL1MessageQueue:blobPayloadV7OffsetPostL1MessageQueue], b.prevL1MessageQueueHash[:]) copy(payloadBytes[blobPayloadV7OffsetPostL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber], b.postL1MessageQueueHash[:]) @@ -255,7 +248,7 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { } binary.BigEndian.PutUint16(payloadBytes[blobPayloadV7OffsetNumBlocks:blobPayloadV7OffsetBlocks], uint16(len(b.blocks))) - l1MessageIndex := b.initialL1MessageIndex + var l1MessageIndex *uint64 var transactionBytes []byte for i, block := range b.blocks { // sanity check: block numbers are contiguous @@ -264,16 +257,20 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { } // sanity check (within NumL1MessagesNoSkipping): L1 message indices are contiguous within a block - numL1Messages, highestQueueIndex, err := block.NumL1MessagesNoSkipping() + numL1Messages, lowestQueueIndex, highestQueueIndex, err := block.NumL1MessagesNoSkipping() if err != nil { return nil, fmt.Errorf("failed to get numL1Messages: %w", err) } // sanity check: L1 message indices are contiguous across blocks boundaries if 
numL1Messages > 0 { - if l1MessageIndex+uint64(numL1Messages) != highestQueueIndex+1 { + // set l1MessageIndex to the lowestQueueIndex if it's nil (first L1 message in the batch) + if l1MessageIndex == nil { + l1MessageIndex = &lowestQueueIndex + } + if *l1MessageIndex+uint64(numL1Messages) != highestQueueIndex+1 { return nil, fmt.Errorf("failed to sanity check L1 messages count after block %d: l1MessageIndex + numL1Messages != highestQueueIndex+1: %d + %d != %d", block.Header.Number.Uint64(), l1MessageIndex, numL1Messages, highestQueueIndex+1) } - l1MessageIndex += uint64(numL1Messages) + *l1MessageIndex += uint64(numL1Messages) } daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), numL1Messages) @@ -310,7 +307,6 @@ func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { return nil, fmt.Errorf("invalid data length for blobPayloadV7, expected at least %d bytes but got %d", blobPayloadV7MinEncodedLength, len(data)) } - initialL1MessageIndex := binary.BigEndian.Uint64(data[blobPayloadV7OffsetInitialL1MessageIndex:blobPayloadV7OffsetPrevL1MessageQueue]) prevL1MessageQueueHash := common.BytesToHash(data[blobPayloadV7OffsetPrevL1MessageQueue:blobPayloadV7OffsetPostL1MessageQueue]) postL1MessageQueueHash := common.BytesToHash(data[blobPayloadV7OffsetPostL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber]) @@ -360,7 +356,6 @@ func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { } return &blobPayloadV7{ - initialL1MessageIndex: initialL1MessageIndex, prevL1MessageQueueHash: prevL1MessageQueueHash, postL1MessageQueueHash: postL1MessageQueueHash, daBlocks: daBlocks, diff --git a/encoding/da.go b/encoding/da.go index d78be89..e73ce2c 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -110,7 +110,6 @@ type Chunk struct { Blocks []*Block `json:"blocks"` // CodecV7. Used for chunk creation in relayer. 
- InitialL1MessageIndex uint64 PrevL1MessageQueueHash common.Hash PostL1MessageQueueHash common.Hash } @@ -123,7 +122,6 @@ type Batch struct { Chunks []*Chunk // CodecV7 - InitialL1MessageIndex uint64 PrevL1MessageQueueHash common.Hash PostL1MessageQueueHash common.Hash Blocks []*Block @@ -146,11 +144,12 @@ func (b *Block) NumL1Messages(totalL1MessagePoppedBefore uint64) uint64 { return *lastQueueIndex - totalL1MessagePoppedBefore + 1 } -// NumL1MessagesNoSkipping returns the number of L1 messages and the highest queue index in this block. +// NumL1MessagesNoSkipping returns the number of L1 messages, the lowest and highest queue index in this block. // This method assumes that L1 messages can't be skipped. -func (b *Block) NumL1MessagesNoSkipping() (uint16, uint64, error) { +func (b *Block) NumL1MessagesNoSkipping() (uint16, uint64, uint64, error) { var count uint16 var prevQueueIndex *uint64 + var lowestQueueIndex uint64 for _, txData := range b.Transactions { if txData.Type != types.L1MessageTxType { @@ -159,6 +158,7 @@ func (b *Block) NumL1MessagesNoSkipping() (uint16, uint64, error) { // If prevQueueIndex is nil, it means this is the first L1 message in the block. if prevQueueIndex == nil { + lowestQueueIndex = txData.Nonce prevQueueIndex = &txData.Nonce count++ continue @@ -166,20 +166,22 @@ func (b *Block) NumL1MessagesNoSkipping() (uint16, uint64, error) { // Check if the queue index is consecutive. 
if txData.Nonce != *prevQueueIndex+1 { - return 0, 0, fmt.Errorf("unexpected queue index: expected %d, got %d", *prevQueueIndex+1, txData.Nonce) + return 0, 0, 0, fmt.Errorf("unexpected queue index: expected %d, got %d", *prevQueueIndex+1, txData.Nonce) } if count == math.MaxUint16 { - return 0, 0, errors.New("number of L1 messages exceeds max uint16") + return 0, 0, 0, errors.New("number of L1 messages exceeds max uint16") } count++ prevQueueIndex = &txData.Nonce } - if prevQueueIndex == nil { - return 0, 0, nil + var prevQueueIndexResult uint64 + if prevQueueIndex != nil { + prevQueueIndexResult = *prevQueueIndex } - return count, *prevQueueIndex, nil + + return count, lowestQueueIndex, prevQueueIndexResult, nil } // NumL2Transactions returns the number of L2 transactions in this block. diff --git a/encoding/interfaces.go b/encoding/interfaces.go index 5259a58..b465c46 100644 --- a/encoding/interfaces.go +++ b/encoding/interfaces.go @@ -44,7 +44,6 @@ type DABatch interface { type DABlobPayload interface { Blocks() []DABlock Transactions() []types.Transactions - InitialL1MessageIndex() uint64 PrevL1MessageQueueHash() common.Hash PostL1MessageQueueHash() common.Hash } From de09af4bc445412cf6fb4933a0213143b54b8b23 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Wed, 19 Feb 2025 19:42:34 +0800 Subject: [PATCH 40/47] address review comments --- encoding/codecv7.go | 52 +++++++++++++++++++-------------------- encoding/codecv7_types.go | 3 +++ encoding/da.go | 4 +-- 3 files changed, 29 insertions(+), 30 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index 76cb249..de2a658 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -38,8 +38,8 @@ func (d *DACodecV7) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) if err != nil { return nil, fmt.Errorf("failed to calculate number of L1 messages: %w", err) } - if totalL1MessagePoppedBefore+uint64(numL1Messages) != highestQueueIndex { - return 
nil, fmt.Errorf("failed to sanity check L1 messages count: totalL1MessagePoppedBefore + numL1Messages != highestQueueIndex: %d + %d != %d", totalL1MessagePoppedBefore, numL1Messages, highestQueueIndex) + if numL1Messages > 0 && totalL1MessagePoppedBefore+uint64(numL1Messages) != highestQueueIndex+1 { + return nil, fmt.Errorf("failed to sanity check L1 messages count: totalL1MessagePoppedBefore + numL1Messages != highestQueueIndex+1: %d + %d != %d", totalL1MessagePoppedBefore, numL1Messages, highestQueueIndex+1) } numL2Transactions := block.NumL2Transactions() @@ -73,10 +73,22 @@ func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, errors.New("number of blocks is 0") } - if len(chunk.Blocks) > math.MaxUint8 { - return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(chunk.Blocks), math.MaxUint8) + if len(chunk.Blocks) > math.MaxUint16 { + return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(chunk.Blocks), math.MaxUint16) } + // sanity check: prevL1MessageQueueHash+apply(L1Messages) = postL1MessageQueueHash + computedPostL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(chunk.PrevL1MessageQueueHash, chunk.Blocks) + if err != nil { + return nil, fmt.Errorf("failed to apply L1 messages to prevL1MessageQueueHash: %w", err) + } + if computedPostL1MessageQueueHash != chunk.PostL1MessageQueueHash { + return nil, fmt.Errorf("failed to sanity check postL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedPostL1MessageQueueHash, chunk.PostL1MessageQueueHash) + } + + if !chunk.Blocks[0].Header.Number.IsUint64() { + return nil, errors.New("block number of initial block is not uint64") + } initialL2BlockNumber := chunk.Blocks[0].Header.Number.Uint64() l1MessageIndex := totalL1MessagePoppedBefore @@ -84,25 +96,17 @@ func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs := make([][]*types.TransactionData, 0, 
len(chunk.Blocks)) for i, block := range chunk.Blocks { + daBlock, err := d.NewDABlock(block, l1MessageIndex) + if err != nil { + return nil, fmt.Errorf("failed to create DABlock from block %d: %w", block.Header.Number.Uint64(), err) + } + l1MessageIndex += uint64(daBlock.NumL1Messages()) + // sanity check: block numbers are contiguous if block.Header.Number.Uint64() != initialL2BlockNumber+uint64(i) { return nil, fmt.Errorf("invalid block number: expected %d but got %d", initialL2BlockNumber+uint64(i), block.Header.Number.Uint64()) } - // sanity check (within NumL1MessagesNoSkipping): L1 message indices are contiguous within a block - numL1Messages, _, highestQueueIndex, err := block.NumL1MessagesNoSkipping() - if err != nil { - return nil, fmt.Errorf("failed to get numL1Messages: %w", err) - } - // sanity check: L1 message indices are contiguous across blocks boundaries - if numL1Messages > 0 { - if l1MessageIndex+uint64(numL1Messages) != highestQueueIndex+1 { - return nil, fmt.Errorf("failed to sanity check L1 messages count after block %d: l1MessageIndex + numL1Messages != highestQueueIndex+1: %d + %d != %d", block.Header.Number.Uint64(), l1MessageIndex, numL1Messages, highestQueueIndex+1) - } - l1MessageIndex += uint64(numL1Messages) - } - - daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), numL1Messages) blocks = append(blocks, daBlock) txs = append(txs, block.Transactions) } @@ -112,15 +116,6 @@ func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) txs, // transactions ) - // sanity check: prevL1MessageQueueHash+apply(L1Messages) = postL1MessageQueueHash - computedPostL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(chunk.PrevL1MessageQueueHash, chunk.Blocks) - if err != nil { - return nil, fmt.Errorf("failed to apply L1 messages to prevL1MessageQueueHash: %w", err) - } - if computedPostL1MessageQueueHash != 
chunk.PostL1MessageQueueHash { - return nil, fmt.Errorf("failed to sanity check postL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedPostL1MessageQueueHash, chunk.PostL1MessageQueueHash) - } - return daChunk, nil } @@ -148,6 +143,9 @@ func (d *DACodecV7) NewDABatch(batch *Batch) (DABatch, error) { chunkBlocksCount++ } } + if chunkBlocksCount != totalBlocks { + return nil, fmt.Errorf("chunks contain less blocks than the batch: %d < %d", chunkBlocksCount, totalBlocks) + } } blob, blobVersionedHash, blobBytes, err := d.constructBlob(batch) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index e8cfa44..611e87c 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -251,6 +251,9 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { var l1MessageIndex *uint64 var transactionBytes []byte for i, block := range b.blocks { + if !block.Header.Number.IsUint64() { + return nil, fmt.Errorf("block number is not a uint64: %s", block.Header.Number.String()) + } // sanity check: block numbers are contiguous if block.Header.Number.Uint64() != initialL2BlockNumber+uint64(i) { return nil, fmt.Errorf("invalid block number: expected %d but got %d", initialL2BlockNumber+uint64(i), block.Header.Number.Uint64()) diff --git a/encoding/da.go b/encoding/da.go index e73ce2c..814cbf3 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -261,9 +261,7 @@ func convertTxDataToRLPEncoding(txData *types.TransactionData) ([]byte, error) { S: txData.S.ToInt(), }) - case types.L1MessageTxType: // L1MessageTxType is not supported - fallthrough - default: + default: // BlobTxType, SetCodeTxType, L1MessageTxType return nil, fmt.Errorf("unsupported tx type: %d", txData.Type) } From f9608ed9904d38c4efa46923a9ecf8a58f6aa0cf Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Wed, 19 Feb 2025 19:51:41 +0800 Subject: [PATCH 41/47] fix tests --- encoding/codecv7_test.go | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index 1078381..7a34eed 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -53,14 +53,14 @@ func TestCodecV7DABlockEncodeDecode(t *testing.T) { blockJSONFile: "testdata/blockTrace_04.json", expectedEncode: "00000000646b6e13000000000000000000000000000000000000000000000000000000000000000000000000007a120000020001", blockNumber: 13, - totalL1MessagePoppedBefore: 9, + totalL1MessagePoppedBefore: 10, }, { name: "Blocktrace 05 - 5 consecutive L1 messages", blockJSONFile: "testdata/blockTrace_05.json", expectedEncode: "00000000646b6ed0000000000000000000000000000000000000000000000000000000000000000000000000007a120000050005", blockNumber: 17, - totalL1MessagePoppedBefore: 36, + totalL1MessagePoppedBefore: 37, }, { name: "Blocktrace 06 - 3 L1 messages with skipping (error)", From 01bd9b5cfff474219eb15ce363937a12847e9451 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Thu, 20 Feb 2025 15:09:49 +0800 Subject: [PATCH 42/47] refactoring to minimize duplicate code and increase maintainability --- encoding/codecv7.go | 115 +++----------------- encoding/codecv7_test.go | 4 +- encoding/codecv7_types.go | 216 ++++++++++++++++++++++++++++---------- 3 files changed, 180 insertions(+), 155 deletions(-) diff --git a/encoding/codecv7.go b/encoding/codecv7.go index de2a658..81c7448 100644 --- a/encoding/codecv7.go +++ b/encoding/codecv7.go @@ -30,34 +30,7 @@ func (d *DACodecV7) MaxNumChunksPerBatch() int { // NewDABlock creates a new DABlock from the given Block and the total number of L1 messages popped before. 
func (d *DACodecV7) NewDABlock(block *Block, totalL1MessagePoppedBefore uint64) (DABlock, error) { - if !block.Header.Number.IsUint64() { - return nil, errors.New("block number is not uint64") - } - - numL1Messages, _, highestQueueIndex, err := block.NumL1MessagesNoSkipping() - if err != nil { - return nil, fmt.Errorf("failed to calculate number of L1 messages: %w", err) - } - if numL1Messages > 0 && totalL1MessagePoppedBefore+uint64(numL1Messages) != highestQueueIndex+1 { - return nil, fmt.Errorf("failed to sanity check L1 messages count: totalL1MessagePoppedBefore + numL1Messages != highestQueueIndex+1: %d + %d != %d", totalL1MessagePoppedBefore, numL1Messages, highestQueueIndex+1) - } - - numL2Transactions := block.NumL2Transactions() - numTransactions := uint64(numL1Messages) + numL2Transactions - if numTransactions > math.MaxUint16 { - return nil, errors.New("number of transactions exceeds max uint16") - } - - daBlock := newDABlockV7( - block.Header.Number.Uint64(), // number - block.Header.Time, // timestamp - block.Header.BaseFee, // baseFee - block.Header.GasLimit, // gasLimit - uint16(numTransactions), // numTransactions - numL1Messages, // numL1Messages - ) - - return daBlock, nil + return newDABlockV7FromBlockWithValidation(block, &totalL1MessagePoppedBefore) } // NewDAChunk creates a new DAChunk from the given Chunk and the total number of L1 messages popped before. 
@@ -77,43 +50,21 @@ func (d *DACodecV7) NewDAChunk(chunk *Chunk, totalL1MessagePoppedBefore uint64) return nil, fmt.Errorf("number of blocks (%d) exceeds maximum allowed (%d)", len(chunk.Blocks), math.MaxUint16) } - // sanity check: prevL1MessageQueueHash+apply(L1Messages) = postL1MessageQueueHash - computedPostL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(chunk.PrevL1MessageQueueHash, chunk.Blocks) - if err != nil { - return nil, fmt.Errorf("failed to apply L1 messages to prevL1MessageQueueHash: %w", err) - } - if computedPostL1MessageQueueHash != chunk.PostL1MessageQueueHash { - return nil, fmt.Errorf("failed to sanity check postL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedPostL1MessageQueueHash, chunk.PostL1MessageQueueHash) - } - - if !chunk.Blocks[0].Header.Number.IsUint64() { - return nil, errors.New("block number of initial block is not uint64") - } - initialL2BlockNumber := chunk.Blocks[0].Header.Number.Uint64() - l1MessageIndex := totalL1MessagePoppedBefore - blocks := make([]DABlock, 0, len(chunk.Blocks)) txs := make([][]*types.TransactionData, 0, len(chunk.Blocks)) - for i, block := range chunk.Blocks { - daBlock, err := d.NewDABlock(block, l1MessageIndex) - if err != nil { - return nil, fmt.Errorf("failed to create DABlock from block %d: %w", block.Header.Number.Uint64(), err) - } - l1MessageIndex += uint64(daBlock.NumL1Messages()) - - // sanity check: block numbers are contiguous - if block.Header.Number.Uint64() != initialL2BlockNumber+uint64(i) { - return nil, fmt.Errorf("invalid block number: expected %d but got %d", initialL2BlockNumber+uint64(i), block.Header.Number.Uint64()) - } - + if err := iterateAndVerifyBlocksAndL1Messages(chunk.PrevL1MessageQueueHash, chunk.PostL1MessageQueueHash, chunk.Blocks, &totalL1MessagePoppedBefore, func(initialBlockNumber uint64) {}, func(block *Block, daBlock *daBlockV7) error { blocks = append(blocks, daBlock) txs = append(txs, block.Transactions) + + return 
nil + }); err != nil { + return nil, fmt.Errorf("failed to iterate and verify blocks and L1 messages: %w", err) } daChunk := newDAChunkV7( - blocks, // blocks - txs, // transactions + blocks, + txs, ) return daChunk, nil @@ -125,27 +76,8 @@ func (d *DACodecV7) NewDABatch(batch *Batch) (DABatch, error) { return nil, errors.New("batch must contain at least one block") } - // If the batch contains chunks, we need to ensure that the blocks in the chunks match the blocks in the batch. - // Chunks are not directly used in DACodecV7, but we still need to check the consistency of the blocks. - // This is done to ensure compatibility with older versions and the relayer implementation. - if len(batch.Chunks) != 0 { - totalBlocks := len(batch.Blocks) - chunkBlocksCount := 0 - for _, chunk := range batch.Chunks { - for _, block := range chunk.Blocks { - if chunkBlocksCount > totalBlocks { - return nil, errors.New("chunks contain more blocks than the batch") - } - - if batch.Blocks[chunkBlocksCount].Header.Hash() != block.Header.Hash() { - return nil, errors.New("blocks in chunks do not match the blocks in the batch") - } - chunkBlocksCount++ - } - } - if chunkBlocksCount != totalBlocks { - return nil, fmt.Errorf("chunks contain less blocks than the batch: %d < %d", chunkBlocksCount, totalBlocks) - } + if err := checkBlocksBatchVSChunksConsistency(batch); err != nil { + return nil, fmt.Errorf("failed to check blocks batch vs chunks consistency: %w", err) } blob, blobVersionedHash, blobBytes, err := d.constructBlob(batch) @@ -260,6 +192,9 @@ func (d *DACodecV7) DecodeBlob(blob *kzg4844.Blob) (DABlobPayload, error) { // read the compressed flag and decompress if needed compressed := rawBytes[blobEnvelopeV7OffsetCompressedFlag] + if compressed != 0x0 && compressed != 0x1 { + return nil, fmt.Errorf("invalid compressed flag: %d", compressed) + } if compressed == 0x1 { var err error if payloadBytes, err = decompressV7Bytes(payloadBytes); err != nil { @@ -311,30 +246,14 @@ func (d 
*DACodecV7) CheckChunkCompressedDataCompatibility(_ *Chunk) (bool, error // CheckBatchCompressedDataCompatibility checks the compressed data compatibility for a batch. func (d *DACodecV7) CheckBatchCompressedDataCompatibility(b *Batch) (bool, error) { - // If the batch contains chunks, we need to ensure that the blocks in the chunks match the blocks in the batch. - // Chunks are not directly used in DACodecV7, but we still need to check the consistency of the blocks. - // This is done to ensure compatibility with older versions and the relayer implementation. - if len(b.Chunks) != 0 { - totalBlocks := len(b.Blocks) - chunkBlocksCount := 0 - for _, chunk := range b.Chunks { - for _, block := range chunk.Blocks { - if chunkBlocksCount > totalBlocks { - return false, errors.New("chunks contain more blocks than the batch") - } - - if b.Blocks[chunkBlocksCount].Header.Hash() != block.Header.Hash() { - return false, errors.New("blocks in chunks do not match the blocks in the batch") - } - chunkBlocksCount++ - } - } - } - if len(b.Blocks) == 0 { return false, errors.New("batch must contain at least one block") } + if err := checkBlocksBatchVSChunksConsistency(b); err != nil { + return false, fmt.Errorf("failed to check blocks batch vs chunks consistency: %w", err) + } + payloadBytes, err := d.constructBlobPayload(b) if err != nil { return false, fmt.Errorf("failed to construct blob payload: %w", err) diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index 7a34eed..5e1995d 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -339,10 +339,10 @@ func TestCodecV7BlobEncodingAndHashing(t *testing.T) { require.Equal(t, len(tc.batch.Blocks), len(blobPayload.Blocks())) decodedBlocks := blobPayload.Blocks() for i, block := range tc.batch.Blocks { - numL1Messages, _, _, err := block.NumL1MessagesNoSkipping() + numL1Messages, lowestQueueIndex, _, err := block.NumL1MessagesNoSkipping() require.NoError(t, err) - daBlock := 
newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(block.NumL2Transactions())+numL1Messages, numL1Messages) + daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(block.NumL2Transactions())+numL1Messages, numL1Messages, lowestQueueIndex) assertEqualDABlocks(t, daBlock, decodedBlocks[i]) txDataDecoded := TxsToTxsData(blobPayload.Transactions()[i]) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 611e87c..125d761 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -5,7 +5,9 @@ import ( "encoding/binary" "encoding/hex" "encoding/json" + "errors" "fmt" + "math" "math/big" "github.com/klauspost/compress/zstd" @@ -46,15 +48,15 @@ const ( ) // Below is the encoding for blobPayloadV7. -// * Field Bytes Type Index Comments -// * prevL1MessageQueueHash 32 bytes32 0 hash of the L1 message queue at the end of previous batch -// * postL1MessageQueueHash 32 bytes32 32 hash of the L1 message queue at the end of this batch -// * initialL2BlockNumber 8 uint64 64 The initial L2 block number in this batch -// * numBlocks 2 uint16 72 The number of blocks in this batch -// * block[0] 52 BlockContextV2 74 The first block in this batch -// * block[i] 52 BlockContextV2 74+52*i The (i+1)th block in this batch -// * block[n-1] 52 BlockContextV2 74+52*(n-1) The last block in this batch -// * l2Transactions dynamic bytes 74+52*n L2 transactions for this batch +// * Field Bytes Type Index Comments +// * prevL1MessageQueueHash 32 bytes32 0 hash of the L1 message queue at the end of previous batch +// * postL1MessageQueueHash 32 bytes32 32 hash of the L1 message queue at the end of this batch +// * initialL2BlockNumber 8 uint64 64 The initial L2 block number in this batch +// * numBlocks 2 uint16 72 The number of blocks in this batch +// * block[0] 52 DABlock7 74 The first block in this batch +// * block[i] 52 
DABlock7 74+52*i The (i+1)th block in this batch +// * block[n-1] 52 DABlock7 74+52*(n-1) The last block in this batch +// * l2Transactions dynamic bytes 74+52*n L2 transactions for this batch const ( blobPayloadV7MinEncodedLength = 2*common.HashLength + 8 + 2 @@ -241,42 +243,11 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { copy(payloadBytes[blobPayloadV7OffsetPrevL1MessageQueue:blobPayloadV7OffsetPostL1MessageQueue], b.prevL1MessageQueueHash[:]) copy(payloadBytes[blobPayloadV7OffsetPostL1MessageQueue:blobPayloadV7OffsetInitialL2BlockNumber], b.postL1MessageQueueHash[:]) - var initialL2BlockNumber uint64 - if len(b.blocks) > 0 { - initialL2BlockNumber = b.blocks[0].Header.Number.Uint64() - binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL2BlockNumber:blobPayloadV7OffsetNumBlocks], initialL2BlockNumber) - } - binary.BigEndian.PutUint16(payloadBytes[blobPayloadV7OffsetNumBlocks:blobPayloadV7OffsetBlocks], uint16(len(b.blocks))) - - var l1MessageIndex *uint64 var transactionBytes []byte - for i, block := range b.blocks { - if !block.Header.Number.IsUint64() { - return nil, fmt.Errorf("block number is not a uint64: %s", block.Header.Number.String()) - } - // sanity check: block numbers are contiguous - if block.Header.Number.Uint64() != initialL2BlockNumber+uint64(i) { - return nil, fmt.Errorf("invalid block number: expected %d but got %d", initialL2BlockNumber+uint64(i), block.Header.Number.Uint64()) - } - - // sanity check (within NumL1MessagesNoSkipping): L1 message indices are contiguous within a block - numL1Messages, lowestQueueIndex, highestQueueIndex, err := block.NumL1MessagesNoSkipping() - if err != nil { - return nil, fmt.Errorf("failed to get numL1Messages: %w", err) - } - // sanity check: L1 message indices are contiguous across blocks boundaries - if numL1Messages > 0 { - // set l1MessageIndex to the lowestQueueIndex if it's nil (first L1 message in the batch) - if l1MessageIndex == nil { - l1MessageIndex = &lowestQueueIndex - } 
- if *l1MessageIndex+uint64(numL1Messages) != highestQueueIndex+1 { - return nil, fmt.Errorf("failed to sanity check L1 messages count after block %d: l1MessageIndex + numL1Messages != highestQueueIndex+1: %d + %d != %d", block.Header.Number.Uint64(), l1MessageIndex, numL1Messages, highestQueueIndex+1) - } - *l1MessageIndex += uint64(numL1Messages) - } - - daBlock := newDABlockV7(block.Header.Number.Uint64(), block.Header.Time, block.Header.BaseFee, block.Header.GasLimit, uint16(len(block.Transactions)), numL1Messages) + if err := iterateAndVerifyBlocksAndL1Messages(b.prevL1MessageQueueHash, b.postL1MessageQueueHash, b.blocks, nil, func(initialL2BlockNumber uint64) { + binary.BigEndian.PutUint64(payloadBytes[blobPayloadV7OffsetInitialL2BlockNumber:blobPayloadV7OffsetNumBlocks], initialL2BlockNumber) + binary.BigEndian.PutUint16(payloadBytes[blobPayloadV7OffsetNumBlocks:blobPayloadV7OffsetBlocks], uint16(len(b.blocks))) + }, func(block *Block, daBlock *daBlockV7) error { payloadBytes = append(payloadBytes, daBlock.Encode()...) // encode L2 txs as RLP and append to transactionBytes @@ -286,22 +257,18 @@ func (b *blobPayloadV7) Encode() ([]byte, error) { } rlpTxData, err := convertTxDataToRLPEncoding(txData) if err != nil { - return nil, fmt.Errorf("failed to convert txData to RLP encoding: %w", err) + return fmt.Errorf("failed to convert txData to RLP encoding: %w", err) } transactionBytes = append(transactionBytes, rlpTxData...) } - } - payloadBytes = append(payloadBytes, transactionBytes...) 
- // sanity check: prevL1MessageQueueHash+apply(L1Messages) = postL1MessageQueueHash - computedPostL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(b.prevL1MessageQueueHash, b.blocks) - if err != nil { - return nil, fmt.Errorf("failed to apply L1 messages to prevL1MessageQueueHash: %w", err) - } - if computedPostL1MessageQueueHash != b.postL1MessageQueueHash { - return nil, fmt.Errorf("failed to sanity check postL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedPostL1MessageQueueHash, b.postL1MessageQueueHash) + return nil + }); err != nil { + return nil, fmt.Errorf("failed to iterate and verify blocks and L1 messages: %w", err) } + payloadBytes = append(payloadBytes, transactionBytes...) + return payloadBytes, nil } @@ -368,10 +335,51 @@ func decodeBlobPayloadV7(data []byte) (*blobPayloadV7, error) { type daBlockV7 struct { daBlockV0 + + lowestL1MessageQueueIndex uint64 +} + +func newDABlockV7FromBlockWithValidation(block *Block, totalL1MessagePoppedBefore *uint64) (*daBlockV7, error) { + if !block.Header.Number.IsUint64() { + return nil, errors.New("block number is not uint64") + } + + numL1Messages, lowestQueueIndex, highestQueueIndex, err := block.NumL1MessagesNoSkipping() + if err != nil { + return nil, fmt.Errorf("failed to calculate number of L1 messages: %w", err) + } + if numL1Messages > 0 { + var startL1MessageIndex uint64 + if totalL1MessagePoppedBefore != nil { + startL1MessageIndex = *totalL1MessagePoppedBefore + } else { + startL1MessageIndex = lowestQueueIndex + } + + if startL1MessageIndex+uint64(numL1Messages) != highestQueueIndex+1 { + return nil, fmt.Errorf("failed to sanity check L1 messages count: startL1MessageIndex + numL1Messages != highestQueueIndex+1: %d + %d != %d", startL1MessageIndex, numL1Messages, highestQueueIndex+1) + } + } + + numL2Transactions := block.NumL2Transactions() + numTransactions := uint64(numL1Messages) + numL2Transactions + if numTransactions > math.MaxUint16 { + return 
nil, errors.New("number of transactions exceeds max uint16") + } + + return newDABlockV7( + block.Header.Number.Uint64(), + block.Header.Time, + block.Header.BaseFee, + block.Header.GasLimit, + uint16(numTransactions), + numL1Messages, + lowestQueueIndex, + ), nil } // newDABlockV7 is a constructor function for daBlockV7 that initializes the internal fields. -func newDABlockV7(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16) *daBlockV7 { +func newDABlockV7(number uint64, timestamp uint64, baseFee *big.Int, gasLimit uint64, numTransactions uint16, numL1Messages uint16, lowestL1MessageQueueIndex uint64) *daBlockV7 { return &daBlockV7{ daBlockV0: daBlockV0{ number: number, @@ -381,6 +389,7 @@ func newDABlockV7(number uint64, timestamp uint64, baseFee *big.Int, gasLimit ui numTransactions: numTransactions, numL1Messages: numL1Messages, }, + lowestL1MessageQueueIndex: lowestL1MessageQueueIndex, } } @@ -497,3 +506,100 @@ func decodeSize3Bytes(data []byte) uint32 { func encodeSize3Bytes(data uint32) []byte { return []byte{byte(data >> 16), byte(data >> 8), byte(data)} } + +// iterateAndVerifyBlocksAndL1Messages iterates over the blocks and verifies the blocks and L1 messages. +// It verifies: +// - that L1 messages within and across blocks are contiguous +// - correctness of prevL1MessageQueueHash and postL1MessageQueueHash after applying all L1 messages +// - block numbers are contiguous and uint64 +// +// The function calls the initialL2BlockNumberCallback with the initial L2 block number of the batch once. +// The function calls the blockCallBack for each block with the block and the corresponding daBlock. 
+func iterateAndVerifyBlocksAndL1Messages(prevL1MessageQueueHash, postL1MessageQueueHash common.Hash, blocks []*Block, totalL1MessagePoppedBefore *uint64, initialL2BlockNumberCallback func(initialL2BlockNumber uint64), blockCallBack func(block *Block, daBlock *daBlockV7) error) error { + if len(blocks) == 0 { + return nil + } + + if !blocks[0].Header.Number.IsUint64() { + return errors.New("block number of initial block is not uint64") + } + initialL2BlockNumber := blocks[0].Header.Number.Uint64() + var startL1MessageIndex *uint64 + if totalL1MessagePoppedBefore != nil { + *startL1MessageIndex = *totalL1MessagePoppedBefore + } + + initialL2BlockNumberCallback(initialL2BlockNumber) + + for i, block := range blocks { + if !block.Header.Number.IsUint64() { + return fmt.Errorf("block number is not a uint64: %s", block.Header.Number.String()) + } + // sanity check: block numbers are contiguous + if block.Header.Number.Uint64() != initialL2BlockNumber+uint64(i) { + return fmt.Errorf("invalid block number: expected %d but got %d", initialL2BlockNumber+uint64(i), block.Header.Number.Uint64()) + } + + // sanity check (within NumL1MessagesNoSkipping in newDABlockV7FromBlockWithValidation): L1 message indices are contiguous within a block + daBlock, err := newDABlockV7FromBlockWithValidation(block, startL1MessageIndex) + if err != nil { + return fmt.Errorf("failed to create DABlock from block %d: %w", block.Header.Number.Uint64(), err) + } + // sanity check: L1 message indices are contiguous across blocks boundaries as startL1MessageIndex is verified in newDABlockV7FromBlockWithValidation + // to be: startL1MessageIndex + numL1Messages in block == highestQueueIndex+1 in block + if daBlock.NumL1Messages() > 0 { + // set startL1MessageIndex to the lowestQueueIndex if it's nil (first L1 message within the blocks) + if startL1MessageIndex == nil { + startL1MessageIndex = new(uint64) + *startL1MessageIndex = daBlock.lowestL1MessageQueueIndex + } + *startL1MessageIndex += 
uint64(daBlock.NumL1Messages()) + } + + if err = blockCallBack(block, daBlock); err != nil { + return fmt.Errorf("failed to process block %d: %w", block.Header.Number.Uint64(), err) + } + } + + // sanity check: prevL1MessageQueueHash+apply(L1Messages) = postL1MessageQueueHash + computedPostL1MessageQueueHash, err := MessageQueueV2ApplyL1MessagesFromBlocks(prevL1MessageQueueHash, blocks) + if err != nil { + return fmt.Errorf("failed to apply L1 messages to prevL1MessageQueueHash: %w", err) + } + if computedPostL1MessageQueueHash != postL1MessageQueueHash { + return fmt.Errorf("failed to sanity check postL1MessageQueueHash after applying all L1 messages: expected %s, got %s", computedPostL1MessageQueueHash, postL1MessageQueueHash) + } + + return nil +} + +// checkBlocksBatchVSChunksConsistency checks the consistency between blocks in the batch and blocks in the chunks. +// If the batch contains chunks, we need to ensure that the blocks in the chunks match the blocks in the batch. +// Chunks are not directly used in DACodecV7, but we still need to check the consistency of the blocks. +// This is done to ensure compatibility with older versions and the relayer implementation. 
+func checkBlocksBatchVSChunksConsistency(batch *Batch) error { + if len(batch.Chunks) == 0 { + return nil + } + + totalBlocks := len(batch.Blocks) + chunkBlocksCount := 0 + for _, chunk := range batch.Chunks { + for _, block := range chunk.Blocks { + if chunkBlocksCount > totalBlocks { + return errors.New("chunks contain more blocks than the batch") + } + + if batch.Blocks[chunkBlocksCount].Header.Hash() != block.Header.Hash() { + return errors.New("blocks in chunks do not match the blocks in the batch") + } + chunkBlocksCount++ + } + } + + if chunkBlocksCount != totalBlocks { + return fmt.Errorf("chunks contain less blocks than the batch: %d < %d", chunkBlocksCount, totalBlocks) + } + + return nil +} From fca406cec6244e6a97773c2ed71bb49de8050668 Mon Sep 17 00:00:00 2001 From: jonastheis <4181434+jonastheis@users.noreply.github.com> Date: Thu, 20 Feb 2025 16:17:45 +0800 Subject: [PATCH 43/47] fix nil pointer --- encoding/codecv7_types.go | 1 + 1 file changed, 1 insertion(+) diff --git a/encoding/codecv7_types.go b/encoding/codecv7_types.go index 125d761..e7dcf01 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -526,6 +526,7 @@ func iterateAndVerifyBlocksAndL1Messages(prevL1MessageQueueHash, postL1MessageQu initialL2BlockNumber := blocks[0].Header.Number.Uint64() var startL1MessageIndex *uint64 if totalL1MessagePoppedBefore != nil { + startL1MessageIndex = new(uint64) *startL1MessageIndex = *totalL1MessagePoppedBefore } From 836dd1e4c005e156e012a279e2b3b3421db4381a Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 20 Feb 2025 21:05:17 +0800 Subject: [PATCH 44/47] feat: add setcode tx support --- encoding/da.go | 18 +++++++++++++++++- go.mod | 2 +- go.sum | 4 ++-- 3 files changed, 20 insertions(+), 4 deletions(-) diff --git a/encoding/da.go b/encoding/da.go index 814cbf3..c691d72 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -9,6 +9,7 @@ import ( "math/big" "slices" + "github.com/holiman/uint256" "github.com/klauspost/compress/zstd" 
"github.com/scroll-tech/go-ethereum/crypto" @@ -261,7 +262,22 @@ func convertTxDataToRLPEncoding(txData *types.TransactionData) ([]byte, error) { S: txData.S.ToInt(), }) - default: // BlobTxType, SetCodeTxType, L1MessageTxType + case types.SetCodeTxType: + tx = types.NewTx(&types.SetCodeTx{ + ChainID: uint256.MustFromBig(txData.ChainId.ToInt()), + Nonce: txData.Nonce, + To: *txData.To, + Value: uint256.MustFromBig(txData.Value.ToInt()), + Gas: txData.Gas, + GasTipCap: uint256.MustFromBig(txData.GasTipCap.ToInt()), + GasFeeCap: uint256.MustFromBig(txData.GasFeeCap.ToInt()), + Data: data, + V: uint256.MustFromBig(txData.V.ToInt()), + R: uint256.MustFromBig(txData.R.ToInt()), + S: uint256.MustFromBig(txData.S.ToInt()), + }) + + default: // BlobTxType, L1MessageTxType return nil, fmt.Errorf("unsupported tx type: %d", txData.Type) } diff --git a/go.mod b/go.mod index f9cae06..76337d9 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( github.com/agiledragon/gomonkey/v2 v2.12.0 - github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f + github.com/scroll-tech/go-ethereum v1.10.14-0.20250220125731-2dde0c79d2a1 github.com/stretchr/testify v1.9.0 ) diff --git a/go.sum b/go.sum index e7cda67..dfaf846 100644 --- a/go.sum +++ b/go.sum @@ -78,8 +78,8 @@ github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f h1:WgIRuMWa7Q/xD1LHPEbQ9PpltasNiYR04qFzatiP/R0= -github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250220125731-2dde0c79d2a1 
h1:1Hr0wt0BDDkW7SrrAybd4VUFVVRRlHmoScf49RJy1RE= +github.com/scroll-tech/go-ethereum v1.10.14-0.20250220125731-2dde0c79d2a1/go.mod h1:AgU8JJxC7+nfs7R7ma35AU7dMAGW7wCw3dRZRefIKyQ= github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE= github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk= github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI= From 5b19a278a7744e948ee0b0e21f917fbbb196a1cb Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 20 Feb 2025 21:45:04 +0800 Subject: [PATCH 45/47] add AccessList and AuthList --- encoding/da.go | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/encoding/da.go b/encoding/da.go index c691d72..e20dc0a 100644 --- a/encoding/da.go +++ b/encoding/da.go @@ -264,17 +264,19 @@ func convertTxDataToRLPEncoding(txData *types.TransactionData) ([]byte, error) { case types.SetCodeTxType: tx = types.NewTx(&types.SetCodeTx{ - ChainID: uint256.MustFromBig(txData.ChainId.ToInt()), - Nonce: txData.Nonce, - To: *txData.To, - Value: uint256.MustFromBig(txData.Value.ToInt()), - Gas: txData.Gas, - GasTipCap: uint256.MustFromBig(txData.GasTipCap.ToInt()), - GasFeeCap: uint256.MustFromBig(txData.GasFeeCap.ToInt()), - Data: data, - V: uint256.MustFromBig(txData.V.ToInt()), - R: uint256.MustFromBig(txData.R.ToInt()), - S: uint256.MustFromBig(txData.S.ToInt()), + ChainID: uint256.MustFromBig(txData.ChainId.ToInt()), + Nonce: txData.Nonce, + To: *txData.To, + Value: uint256.MustFromBig(txData.Value.ToInt()), + Gas: txData.Gas, + GasTipCap: uint256.MustFromBig(txData.GasTipCap.ToInt()), + GasFeeCap: uint256.MustFromBig(txData.GasFeeCap.ToInt()), + Data: data, + AccessList: txData.AccessList, + AuthList: txData.AuthorizationList, + V: uint256.MustFromBig(txData.V.ToInt()), + R: uint256.MustFromBig(txData.R.ToInt()), + S: uint256.MustFromBig(txData.S.ToInt()), }) default: // BlobTxType, L1MessageTxType From 
05c0bbc97984567f99b971696b74b2d159d738cd Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Thu, 20 Feb 2025 21:58:50 +0800 Subject: [PATCH 46/47] go mod tidy --- go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 76337d9..6473bcb 100644 --- a/go.mod +++ b/go.mod @@ -18,7 +18,7 @@ require ( github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4 // indirect github.com/go-ole/go-ole v1.3.0 // indirect github.com/go-stack/stack v1.8.1 // indirect - github.com/holiman/uint256 v1.2.4 // indirect + github.com/holiman/uint256 v1.2.4 github.com/iden3/go-iden3-crypto v0.0.15 // indirect github.com/klauspost/compress v1.17.9 github.com/kr/text v0.2.0 // indirect From 273e28e72dd7e8519336a6f5a370fd8234eb83da Mon Sep 17 00:00:00 2001 From: colinlyguo Date: Fri, 21 Feb 2025 17:43:29 +0800 Subject: [PATCH 47/47] fix conflict fix bugs --- encoding/codecv7_test.go | 12 ++++++------ encoding/codecv7_types.go | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/encoding/codecv7_test.go b/encoding/codecv7_test.go index 5e1995d..7a99238 100644 --- a/encoding/codecv7_test.go +++ b/encoding/codecv7_test.go @@ -409,9 +409,9 @@ func TestCodecV7BatchStandardTestCasesEnableCompression(t *testing.T) { expectedBlobVersionedHash string }{ { - name: "no blocks", - txData: []string{}, - expectedBlobVersionedHash: "0x018ea63fc2caaef749cedbeb0d890c006692a5507bb184817483bd5067e432b9", + name: "no blocks", + txData: []string{}, + creationErr: "no blocks", }, { name: "single block, single tx", @@ -553,9 +553,9 @@ func TestCodecV7BatchStandardTestCasesDisableCompression(t *testing.T) { expectedBlobVersionedHash string }{ { - name: "no blocks", - txData: []string{}, - expectedBlobVersionedHash: "0x0127467f5062c887d10c72713d76406ef5caebe2df5b1b679a1b5cd812cf395b", + name: "no blocks", + txData: []string{}, + creationErr: "no blocks", }, { name: "single block, single tx", diff --git a/encoding/codecv7_types.go 
b/encoding/codecv7_types.go index 3d57f8d..a1fb231 100644 --- a/encoding/codecv7_types.go +++ b/encoding/codecv7_types.go @@ -49,7 +49,7 @@ const ( // Below is the encoding for blobPayloadV7. // * Field Bytes Type Index Comments -// * prevL1MessageQueueHash 32 bytes32 0 hash of the L1 message queue at the end of previous batch +// * prevL1MessageQueueHash 32 bytes32 0 hash of the L1 message queue at the end of previous batch // * postL1MessageQueueHash 32 bytes32 32 hash of the L1 message queue at the end of this batch // * initialL2BlockNumber 8 uint64 64 The initial L2 block number in this batch // * numBlocks 2 uint16 72 The number of blocks in this batch