Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(protocol): support delayed forced inclusion of txs #18826

Draft
wants to merge 42 commits into
base: pacaya_fork
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from 33 commits
Commits
Show all changes
42 commits
Select commit Hold shift + click to select a range
e4a7d10
emit blob hashes in event
dantaik Jan 22, 2025
56a6e93
Update TaikoInbox.sol
dantaik Jan 22, 2025
c38afbb
refactor
dantaik Jan 22, 2025
4bde037
Update TaikoInbox.sol
dantaik Jan 22, 2025
bd2202f
Update Layer1Test.sol
dantaik Jan 22, 2025
e723138
BatchInfo
dantaik Jan 22, 2025
5a5f2b1
more
dantaik Jan 22, 2025
1266339
fix
dantaik Jan 22, 2025
8519a57
fix
dantaik Jan 22, 2025
24d0029
Update TaikoInbox.sol
dantaik Jan 22, 2025
34ea458
forge fmt & update contract layout tables
dantaik Jan 22, 2025
267719e
reorder
dantaik Jan 22, 2025
08356e4
Update TaikoInbox.sol
dantaik Jan 22, 2025
321002d
Update ITaikoInbox.sol
dantaik Jan 22, 2025
19e00ab
more
dantaik Jan 22, 2025
5a59c94
more
dantaik Jan 22, 2025
5b39351
more
dantaik Jan 22, 2025
95df593
Update ITaikoInbox.sol
dantaik Jan 22, 2025
b27e27d
Merge branch 'pacaya_fork' into emit_blob_hashes_in_event
dantaik Jan 23, 2025
40dcfa8
show case an idea
dantaik Jan 23, 2025
84d4493
Update TaikoInboxWithForcedTxInclusion.sol
dantaik Jan 23, 2025
69848fe
split
dantaik Jan 23, 2025
e1290d4
Update TaikoInboxWithForcedTxInclusion.sol
dantaik Jan 23, 2025
6eaa54b
Update TaikoInboxWithForcedTxInclusion.sol
dantaik Jan 23, 2025
f38b07e
Update TaikoInbox.sol
dantaik Jan 23, 2025
fec04d5
forge fmt & update contract layout tables
dantaik Jan 23, 2025
3095e13
more
dantaik Jan 23, 2025
48479b5
support blob hashes as parameters
dantaik Jan 23, 2025
cc6274b
Merge branch 'emit_blob_hashes_in_event' into forced_tx_inclusion
dantaik Jan 23, 2025
e532efe
removed 1 TODO
dantaik Jan 23, 2025
022849d
Update TaikoInboxWithForcedTxInclusion.sol
dantaik Jan 23, 2025
7a8e392
Merge branch 'pacaya_fork' into emit_blob_hashes_in_event
dantaik Jan 23, 2025
da354f5
Merge branch 'emit_blob_hashes_in_event' into forced_tx_inclusion
dantaik Jan 23, 2025
f9885e9
Update ITaikoInbox.sol
dantaik Jan 23, 2025
e5bf3f1
Merge branch 'pacaya_fork' into forced_tx_inclusion
dantaik Jan 24, 2025
40b0db8
feat(protocol): forced inclusion store (#18829)
cyberhorsey Jan 24, 2025
8fc98eb
merge pacaya_fork branch, update resolvers
cyberhorsey Jan 24, 2025
cf15174
add the forced inclusion inbox as a WL proposer
cyberhorsey Jan 24, 2025
b96c0ef
Merge branch 'pacaya_fork' into forced_tx_inclusion
dantaik Jan 25, 2025
c64a03b
fee => feeInGwei
dantaik Jan 27, 2025
f9fa4b4
move new files to a new dir
dantaik Jan 27, 2025
06225d8
Update gen-layouts.sh
dantaik Jan 27, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions packages/protocol/contracts/layer1/based/IForcedInclusionStore.sol
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.24;

/// @title IForcedInclusionStore
/// @custom:security-contact security@taiko.xyz
interface IForcedInclusionStore {
    /// @dev A forced inclusion request awaiting consumption.
    struct ForcedInclusion {
        // Hash of the blob that carries the forced transactions.
        bytes32 blobHash;
        // Identifier of this inclusion request.
        uint64 id;
        // Byte offset of the transaction data within the blob.
        uint32 blobByteOffset;
        // Byte size of the transaction data within the blob.
        uint32 blobByteSize;
        // Fee paid to the caller that consumes this request.
        uint256 priorityFee;
    }

    /// @dev Consumes a forced inclusion request.
    /// The inclusion request must be marked as processed and the priority fee must be paid to
    /// the caller.
    /// @return The consumed forced inclusion request.
    function consumeForcedInclusion() external returns (ForcedInclusion memory);
}
59 changes: 41 additions & 18 deletions packages/protocol/contracts/layer1/based/ITaikoInbox.sol
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,22 @@ interface ITaikoInbox {
}

struct BlobParams {
// The hashes of the blob. Note that if this array is not empty. `firstBlobIndex` and
// `numBlobs` must be 0.
bytes32[] blobHashes;
// The index of the first blob in this batch.
uint8 firstBlobIndex;
// The number of blobs in this batch. Blobs are initially concatenated and subsequently
// decompressed via Zlib.
uint8 numBlobs;
// The byte offset of the blob in the batch.
uint32 byteOffset;
// The byte size of the blob.
uint32 byteSize;
}

struct BlobParams2 {
bytes32 blobhash;
uint32 byteOffset;
uint32 byteSize;
}
Expand All @@ -53,25 +64,35 @@ interface ITaikoInbox {
BlockParams[] blocks;
}

struct BatchMetadata {
bytes32 txListHash;
/// @dev This struct holds batch information essential for constructing blocks offchain, but it
/// does not include data necessary for batch proving.
struct BatchInfo {
bytes32 txsHash;
// Data to build L2 blocks
BlockParams[] blocks;
bytes32[] blobHashes;
bytes32 extraData;
address coinbase;
uint64 batchId;
uint32 gasLimit;
uint64 lastBlockTimestamp;
bytes32 parentMetaHash;
address proposer;
uint96 livenessBond;
uint64 proposedAt; // Used by node/client
uint64 proposedIn; // Used by node/client
uint32 blobByteOffset;
uint32 blobByteSize;
uint32 gasLimit;
// Data for the L2 anchor transaction, shared by all blocks in the batch
uint64 anchorBlockId;
// corresponds to the `_anchorStateRoot` parameter in the anchor transaction.
// The batch's validity proof shall verify the integrity of these two values.
bytes32 anchorBlockHash;
bytes32[] signalSlots;
bytes32 anchorInput;
BlockParams[] blocks;
BlobParams blobParams;
LibSharedData.BaseFeeConfig baseFeeConfig;
bytes32[] signalSlots;
}

/// @dev This struct holds batch metadata essential for proving the batch.
struct BatchMetadata {
bytes32 infoHash;
address proposer;
uint64 batchId;
uint64 proposedAt; // Used by node/client
}

/// @notice Struct representing transition to be proven.
Expand Down Expand Up @@ -208,10 +229,10 @@ interface ITaikoInbox {
event Stats2Updated(Stats2 stats2);

/// @notice Emitted when a batch is proposed.
/// @param info The info of the proposed batch.
/// @param meta The metadata of the proposed batch.
/// @param calldataUsed Whether calldata is used for txList DA.
/// @param txListInCalldata The tx list in calldata.
event BatchProposed(BatchMetadata meta, bool calldataUsed, bytes txListInCalldata);
/// @param txList The tx list in calldata.
event BatchProposed(BatchInfo info, BatchMetadata meta, bytes txList);

/// @notice Emitted when multiple transitions are proved.
/// @param verifier The address of the verifier.
Expand Down Expand Up @@ -250,6 +271,7 @@ interface ITaikoInbox {
error CustomProposerNotAllowed();
error EtherNotPaidAsBond();
error InsufficientBond();
error InvalidBlobParams();
error InvalidGenesisBlockHash();
error InvalidParams();
error InvalidTransitionBlockHash();
Expand All @@ -259,7 +281,7 @@ interface ITaikoInbox {
error MsgValueNotZero();
error NoBlocksToProve();
error NotFirstProposal();
error NotPreconfRouter();
error NotInboxOperator();
error ParentMetaHashMismatch();
error SameTransition();
error SignalNotSent();
Expand All @@ -276,13 +298,14 @@ interface ITaikoInbox {
/// @param _params ABI-encoded BlockParams.
/// @param _txList The transaction list in calldata. If the txList is empty, blob will be used
/// for data availability.
/// @return Batch metadata.
/// @return info_ The info of the proposed batch.
/// @return meta_ The metadata of the proposed batch.
function proposeBatch(
bytes calldata _params,
bytes calldata _txList
)
external
returns (BatchMetadata memory);
returns (BatchInfo memory info_, BatchMetadata memory meta_);

/// @notice Proves state transitions for multiple batches with a single aggregated proof.
/// @param _params ABI-encoded parameter containing:
Expand Down
117 changes: 76 additions & 41 deletions packages/protocol/contracts/layer1/based/TaikoInbox.sol
Original file line number Diff line number Diff line change
Expand Up @@ -46,14 +46,15 @@ abstract contract TaikoInbox is EssentialContract, ITaikoInbox, ITaiko, IFork {
/// @param _params ABI-encoded BlockParams.
/// @param _txList The transaction list in calldata. If the txList is empty, blob will be used
/// for data availability.
/// @return meta_ Batch metadata.
/// @return info_ The info of the proposed batch.
/// @return meta_ The metadata of the proposed batch.
function proposeBatch(
bytes calldata _params,
bytes calldata _txList
)
external
public
nonReentrant
returns (BatchMetadata memory meta_)
returns (BatchInfo memory info_, BatchMetadata memory meta_)
{
Stats2 memory stats2 = state.stats2;
require(!stats2.paused, ContractPaused());
Expand All @@ -69,32 +70,44 @@ abstract contract TaikoInbox is EssentialContract, ITaikoInbox, ITaiko, IFork {
BatchParams memory params = abi.decode(_params, (BatchParams));

{
address preconfRouter = resolve(LibStrings.B_PRECONF_ROUTER, true);
if (preconfRouter == address(0)) {
address operator = resolve(LibStrings.B_INBOX_OPERATOR, true);
if (operator == address(0)) {
require(params.proposer == address(0), CustomProposerNotAllowed());
params.proposer = msg.sender;

// blob hashes are only accepted if the caller is trusted.
require(params.blobParams.blobHashes.length == 0, InvalidBlobParams());
} else {
require(msg.sender == preconfRouter, NotPreconfRouter());
require(msg.sender == operator, NotInboxOperator());
require(params.proposer != address(0), CustomProposerMissing());
}

if (params.coinbase == address(0)) {
params.coinbase = params.proposer;
}

if (params.revertIfNotFirstProposal) {
require(state.stats2.lastProposedIn != block.number, NotFirstProposal());
}
}

if (params.revertIfNotFirstProposal) {
require(state.stats2.lastProposedIn != block.number, NotFirstProposal());
bool calldataUsed = _txList.length != 0;

if (!calldataUsed) {
if (params.blobParams.blobHashes.length == 0) {
require(params.blobParams.numBlobs != 0, BlobNotSpecified());
} else {
require(
params.blobParams.numBlobs == 0 && params.blobParams.firstBlobIndex == 0,
InvalidBlobParams()
);
}
}

// Keep track of last batch's information.
Batch storage lastBatch =
state.batches[(stats2.numBatches - 1) % config.batchRingBufferSize];

bool calldataUsed = _txList.length != 0;

require(calldataUsed || params.blobParams.numBlobs != 0, BlobNotSpecified());

(uint64 anchorBlockId, uint64 lastBlockTimestamp) = _validateBatchParams(
params,
config.maxAnchorHeightOffset,
Expand All @@ -113,37 +126,43 @@ abstract contract TaikoInbox is EssentialContract, ITaikoInbox, ITaiko, IFork {
// use
// the following approach to calculate a block's difficulty:
// `keccak256(abi.encode("TAIKO_DIFFICULTY", block.number))`

meta_ = BatchMetadata({
txListHash: calldataUsed ? keccak256(_txList) : _calcTxListHash(params.blobParams),
info_ = BatchInfo({
txsHash: bytes32(0), // to be initialised later
//
// Data to build L2 blocks
blocks: params.blocks,
blobHashes: new bytes32[](0), // to be initialised later
extraData: bytes32(uint256(config.baseFeeConfig.sharingPctg)),
coinbase: params.coinbase,
batchId: stats2.numBatches,
gasLimit: config.blockMaxGasLimit,
lastBlockTimestamp: lastBlockTimestamp,
parentMetaHash: lastBatch.metaHash,
proposer: params.proposer,
livenessBond: config.livenessBondBase
+ config.livenessBondPerBlock * uint96(params.blocks.length),
proposedAt: uint64(block.timestamp),
proposedIn: uint64(block.number),
blobByteOffset: params.blobParams.byteOffset,
blobByteSize: params.blobParams.byteSize,
gasLimit: config.blockMaxGasLimit,
//
// Data for the L2 anchor transaction, shared by all blocks in the batch
anchorBlockId: anchorBlockId,
anchorBlockHash: blockhash(anchorBlockId),
signalSlots: params.signalSlots,
blocks: params.blocks,
anchorInput: params.anchorInput,
blobParams: params.blobParams,
baseFeeConfig: config.baseFeeConfig
baseFeeConfig: config.baseFeeConfig,
signalSlots: params.signalSlots
});

require(meta_.anchorBlockHash != 0, ZeroAnchorBlockHash());
require(meta_.txListHash != 0, BlobNotFound());
bytes32 metaHash = keccak256(abi.encode(meta_));
require(info_.anchorBlockHash != 0, ZeroAnchorBlockHash());

(info_.txsHash, info_.blobHashes) =
_calculateTxsHash(keccak256(_txList), params.blobParams);

meta_ = BatchMetadata({
infoHash: keccak256(abi.encode(info_)),
proposer: params.proposer,
batchId: stats2.numBatches,
proposedAt: uint64(block.timestamp)
});

Batch storage batch = state.batches[stats2.numBatches % config.batchRingBufferSize];

// SSTORE #1
batch.metaHash = metaHash;
batch.metaHash = keccak256(abi.encode(meta_));

// SSTORE #2 {{
batch.batchId = stats2.numBatches;
Expand All @@ -154,21 +173,25 @@ abstract contract TaikoInbox is EssentialContract, ITaikoInbox, ITaiko, IFork {
batch.reserved4 = 0;
// SSTORE }}

uint96 livenessBond =
config.livenessBondBase + config.livenessBondPerBlock * uint96(params.blocks.length);
_debitBond(params.proposer, livenessBond);

// SSTORE #3 {{
if (stats2.numBatches == config.forkHeights.pacaya) {
batch.lastBlockId = batch.batchId + uint64(params.blocks.length) - 1;
} else {
batch.lastBlockId = lastBatch.lastBlockId + uint64(params.blocks.length);
}
batch.livenessBond = meta_.livenessBond;

batch.livenessBond = livenessBond;
batch._reserved3 = 0;
// SSTORE }}

stats2.numBatches += 1;
stats2.lastProposedIn = uint56(block.number);

_debitBond(params.proposer, meta_.livenessBond);
emit BatchProposed(meta_, calldataUsed, _txList);
emit BatchProposed(info_, meta_, _txList);
} // end-of-unchecked

_verifyBatches(config, stats2, 1);
Expand Down Expand Up @@ -515,18 +538,30 @@ abstract contract TaikoInbox is EssentialContract, ITaikoInbox, ITaiko, IFork {
state.stats2.paused = true;
}

function _calcTxListHash(BlobParams memory _blobParams)
function _calculateTxsHash(
bytes32 _txListHash,
BlobParams memory _blobParams
)
internal
view
virtual
returns (bytes32)
returns (bytes32 hash_, bytes32[] memory blobHashes_)
{
bytes32[] memory blobHashes = new bytes32[](_blobParams.numBlobs);
for (uint256 i; i < _blobParams.numBlobs; ++i) {
blobHashes[i] = blobhash(_blobParams.firstBlobIndex + i);
require(blobHashes[i] != 0, BlobNotFound());
unchecked {
if (_blobParams.blobHashes.length != 0) {
blobHashes_ = _blobParams.blobHashes;
} else {
blobHashes_ = new bytes32[](_blobParams.numBlobs);
for (uint256 i; i < _blobParams.numBlobs; ++i) {
blobHashes_[i] = blobhash(_blobParams.firstBlobIndex + i);
}
}

for (uint256 i; i < blobHashes_.length; ++i) {
require(blobHashes_[i] != 0, BlobNotFound());
}
hash_ = keccak256(abi.encode(_txListHash, blobHashes_));
}
return keccak256(abi.encode(blobHashes));
}

// Private functions -----------------------------------------------------------------------
Expand Down
Loading