From 1b9ab4d151836fc53c6ebdcaeb665561593849f8 Mon Sep 17 00:00:00 2001
From: Santiago Palladino
Date: Thu, 29 Aug 2024 18:59:12 -0300
Subject: [PATCH] feat: Track proving times in prover stats in CLI

Adds an option to compute proving times in the prover-stats CLI command,
and split stats based on a configurable cutoff. Also ensures that the
same proven block is only counted once per prover.

Adds lodash.chunk. Sorry about that.
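Example usage, assuming the CLI binary is invoked as aztec-cli (the
command name and flags are the ones registered in this patch):

    aztec-cli prover-stats --node-url http://localhost:8080 \
      --start-block 1 --end-block 200000 --batch-size 100 \
      --proving-timeout 1800

Without --proving-timeout the output is one line per prover with the
count of unique blocks proven; with it, the command also reports how
many of those blocks were proven within the cutoff (measured as the L1
timestamp difference, in seconds) plus the average proving time. Pass
--raw-logs to dump the underlying events instead of aggregated stats.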
---
 .../archiver/src/archiver/data_retrieval.ts  |   5 +-
 yarn-project/archiver/src/index.ts           |   5 +-
 yarn-project/cli/package.json                |   2 +
 yarn-project/cli/src/cmds/l1/index.ts        |  21 ++-
 yarn-project/cli/src/cmds/l1/prover_stats.ts | 135 ++++++++++++++++--
 .../foundation/src/collection/object.ts      |   8 +-
 yarn-project/yarn.lock                       |   2 +
 7 files changed, 157 insertions(+), 21 deletions(-)

diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts
index be40c7bdb76..4ae92665b3d 100644
--- a/yarn-project/archiver/src/archiver/data_retrieval.ts
+++ b/yarn-project/archiver/src/archiver/data_retrieval.ts
@@ -4,7 +4,7 @@ import { type EthAddress } from '@aztec/foundation/eth-address';
 import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
 import { RollupAbi } from '@aztec/l1-artifacts';
 
-import { type PublicClient, getAbiItem } from 'viem';
+import { type Hex, type PublicClient, getAbiItem } from 'viem';
 
 import {
   getL2BlockProposedLogs,
@@ -147,7 +147,7 @@ export async function retrieveL2ProofVerifiedEvents(
   rollupAddress: EthAddress,
   searchStartBlock: bigint,
   searchEndBlock?: bigint,
-): Promise<{ l1BlockNumber: bigint; l2BlockNumber: bigint; proverId: Fr }[]> {
+): Promise<{ l1BlockNumber: bigint; l2BlockNumber: bigint; proverId: Fr; txHash: Hex }[]> {
   const logs = await publicClient.getLogs({
     address: rollupAddress.toString(),
     fromBlock: searchStartBlock,
@@ -160,5 +160,6 @@
     l1BlockNumber: log.blockNumber,
     l2BlockNumber: log.args.blockNumber,
     proverId: Fr.fromString(log.args.proverId),
+    txHash: log.transactionHash,
   }));
 }
diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts
index 523565aea4e..37b0f6da3b6 100644
--- a/yarn-project/archiver/src/index.ts
+++ b/yarn-project/archiver/src/index.ts
@@ -13,8 +13,9 @@ export * from './archiver/index.js';
 export * from './rpc/index.js';
 export * from './factory.js';
 
-// We are not storing the info from these events in the archiver for now (and we don't really need to), so we expose this query directly
-export { retrieveL2ProofVerifiedEvents } from './archiver/data_retrieval.js';
+export { retrieveL2ProofVerifiedEvents, retrieveBlockMetadataFromRollup } from './archiver/data_retrieval.js';
+
+export { getL2BlockProposedLogs } from './archiver/eth_log_handlers.js';
 
 const log = createDebugLogger('aztec:archiver');
diff --git a/yarn-project/cli/package.json b/yarn-project/cli/package.json
index 7c9959d2b1e..c0f4af79732 100644
--- a/yarn-project/cli/package.json
+++ b/yarn-project/cli/package.json
@@ -72,6 +72,7 @@
     "@iarna/toml": "^2.2.5",
     "@libp2p/peer-id-factory": "^3.0.4",
     "commander": "^12.1.0",
+    "lodash.chunk": "^4.2.0",
     "lodash.groupby": "^4.6.0",
     "semver": "^7.5.4",
     "solc": "^0.8.26",
@@ -86,6 +87,7 @@
     "@aztec/protocol-contracts": "workspace:^",
     "@jest/globals": "^29.5.0",
     "@types/jest": "^29.5.0",
+    "@types/lodash.chunk": "^4.2.9",
     "@types/lodash.groupby": "^4.6.9",
     "@types/lodash.startcase": "^4.4.7",
     "@types/node": "^18.7.23",
diff --git a/yarn-project/cli/src/cmds/l1/index.ts b/yarn-project/cli/src/cmds/l1/index.ts
index 50dfa071e2e..4f8dd9ae6d6 100644
--- a/yarn-project/cli/src/cmds/l1/index.ts
+++ b/yarn-project/cli/src/cmds/l1/index.ts
@@ -285,17 +285,32 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL
       ETHEREUM_HOST,
     )
     .addOption(l1ChainIdOption)
-    .option('--start-block <startBlock>', 'The block number to start from', parseBigint, 1n)
+    .option('--start-block <startBlock>', 'The L1 block number to start from', parseBigint, 1n)
+    .option('--end-block <endBlock>', 'The last L1 block number to query', parseBigint)
     .option('--batch-size <batchSize>', 'The number of blocks to query in each batch', parseBigint, 100n)
+    .option('--proving-timeout <provingTimeout>', 'Cutoff for proving time to consider a block', parseBigint)
     .option('--l1-rollup-address <l1RollupAddress>', 'Address of the rollup contract (required if node URL is not set)')
     .option(
       '--node-url <nodeUrl>',
       'JSON RPC URL of an Aztec node to retrieve the rollup contract address (required if L1 rollup address is not set)',
     )
+    .option('--raw-logs', 'Output raw logs instead of aggregated stats')
     .action(async options => {
       const { proverStats } = await import('./prover_stats.js');
-      const { l1RpcUrl, chainId, l1RollupAddress, startBlock, batchSize, nodeUrl } = options;
-      await proverStats({ l1RpcUrl, chainId, l1RollupAddress, startBlock, batchSize, nodeUrl, log });
+      const { l1RpcUrl, chainId, l1RollupAddress, startBlock, endBlock, batchSize, nodeUrl, provingTimeout, rawLogs } =
+        options;
+      await proverStats({
+        l1RpcUrl,
+        chainId,
+        l1RollupAddress,
+        startBlock,
+        endBlock,
+        batchSize,
+        nodeUrl,
+        provingTimeout,
+        rawLogs,
+        log,
+      });
     });
 
   return program;
diff --git a/yarn-project/cli/src/cmds/l1/prover_stats.ts b/yarn-project/cli/src/cmds/l1/prover_stats.ts
index 4cda8a13a45..316029d12d0 100644
--- a/yarn-project/cli/src/cmds/l1/prover_stats.ts
+++ b/yarn-project/cli/src/cmds/l1/prover_stats.ts
@@ -1,11 +1,13 @@
-import { retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
+import { getL2BlockProposedLogs, retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
 import { createAztecNodeClient } from '@aztec/circuit-types';
 import { EthAddress } from '@aztec/circuits.js';
 import { createEthereumChain } from '@aztec/ethereum';
-import { type LogFn, createDebugLogger } from '@aztec/foundation/log';
+import { compactArray, mapValues, unique } from '@aztec/foundation/collection';
+import { type LogFn, type Logger, createDebugLogger } from '@aztec/foundation/log';
 
+import chunk from 'lodash.chunk';
 import groupBy from 'lodash.groupby';
-import { createPublicClient, http } from 'viem';
+import { type PublicClient, createPublicClient, http } from 'viem';
 
 export async function proverStats(opts: {
   l1RpcUrl: string;
@@ -14,13 +16,18 @@ export async function proverStats(opts: {
   nodeUrl: string | undefined;
   log: LogFn;
   startBlock: bigint;
+  endBlock: bigint | undefined;
   batchSize: bigint;
+  provingTimeout: bigint | undefined;
+  rawLogs: boolean;
 }) {
   const debugLog = createDebugLogger('aztec:cli:prover_stats');
-  const { startBlock, chainId, l1RpcUrl, l1RollupAddress, batchSize, nodeUrl, log } = opts;
+  const { startBlock, chainId, l1RpcUrl, l1RollupAddress, batchSize, nodeUrl, provingTimeout, endBlock, rawLogs, log } =
+    opts;
   if (!l1RollupAddress && !nodeUrl) {
     throw new Error('Either L1 rollup address or node URL must be set');
   }
+
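+  // Resolve the rollup address from the CLI option when provided, or query it from the Aztec node otherwise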
+  const rollup = l1RollupAddress
+    ? EthAddress.fromString(l1RollupAddress)
+    : await createAztecNodeClient(nodeUrl!).getL1ContractAddresses().then(a => a.rollupAddress);
@@ -29,22 +36,130 @@ export async function proverStats(opts: {
   const chain = createEthereumChain(l1RpcUrl, chainId).chainInfo;
   const publicClient = createPublicClient({ chain, transport: http(l1RpcUrl) });
 
-  const lastBlockNum = await publicClient.getBlockNumber();
+  const lastBlockNum = endBlock ?? (await publicClient.getBlockNumber());
 
   debugLog.verbose(`Querying events on rollup at ${rollup.toString()} from ${startBlock} up to ${lastBlockNum}`);
 
+  // Get all events for L2 proof submissions
+  const events = await getL2ProofVerifiedEvents(startBlock, lastBlockNum, batchSize, debugLog, publicClient, rollup);
+
+  // If we only care about raw logs, output them
+  if (rawLogs) {
+    log(`l1_block_number, l2_block_number, prover_id, tx_hash`);
+    for (const event of events) {
+      const { l1BlockNumber, l2BlockNumber, proverId, txHash } = event;
+      log(`${l1BlockNumber}, ${l2BlockNumber}, ${proverId}, ${txHash}`);
+    }
+    return;
+  }
+
+  // If we don't have a proving timeout, we can just count the number of unique blocks per prover
+  if (!provingTimeout) {
+    const stats = groupBy(events, 'proverId');
+    log(`prover_id, total_blocks_proven`);
+    for (const proverId in stats) {
+      const uniqueBlocks = new Set(stats[proverId].map(e => e.l2BlockNumber));
+      log(`${proverId}, ${uniqueBlocks.size}`);
+    }
+    return;
+  }
+
+  // But if we do, fetch the events for each block submitted, so we can look up their timestamps
+  const blockEvents = await getL2BlockEvents(startBlock, lastBlockNum, batchSize, debugLog, publicClient, rollup);
+  debugLog.verbose(
+    `First L2 block within range is ${blockEvents[0]?.args.blockNumber} at L1 block ${blockEvents[0]?.blockNumber}`,
+  );
+
+  // Get the timestamps for every block on every log, both for proof and block submissions
+  const l1BlockNumbers = unique([...events.map(e => e.l1BlockNumber), ...blockEvents.map(e => e.blockNumber)]);
+  const l1BlockTimestamps: Record<string, bigint> = {};
+  for (const l1Batch of chunk(l1BlockNumbers, Number(batchSize))) {
+    const blocks = await Promise.all(
+      l1Batch.map(blockNumber => publicClient.getBlock({ includeTransactions: false, blockNumber })),
+    );
+    debugLog.verbose(`Queried ${blocks.length} L1 blocks between ${l1Batch[0]} and ${l1Batch[l1Batch.length - 1]}`);
+    for (const block of blocks) {
+      l1BlockTimestamps[block.number.toString()] = block.timestamp;
+    }
+  }
+
+  // Map from L2 block number to the L1 block in which it was submitted
+  const l2BlockSubmissions: Record<string, bigint> = {};
+  for (const blockEvent of blockEvents) {
+    l2BlockSubmissions[blockEvent.args.blockNumber.toString()] = blockEvent.blockNumber;
+  }
+
+  // Now calculate stats
+  const stats = mapValues(groupBy(events, 'proverId'), (blocks, proverId) =>
+    compactArray(
+      blocks.map(e => {
+        const provenTimestamp = l1BlockTimestamps[e.l1BlockNumber.toString()];
+        const uploadedBlockNumber = l2BlockSubmissions[e.l2BlockNumber.toString()];
+        if (!uploadedBlockNumber) {
+          debugLog.verbose(
+            `Skipping ${proverId}'s proof for L2 block ${e.l2BlockNumber} as it was before the start block`,
+          );
+          return undefined;
+        }
+        const uploadedTimestamp = l1BlockTimestamps[uploadedBlockNumber.toString()];
+        const provingTime = provenTimestamp - uploadedTimestamp;
+        debugLog.debug(
+          `prover=${e.proverId} blockNumber=${e.l2BlockNumber} uploaded=${uploadedTimestamp} proven=${provenTimestamp} time=${provingTime}`,
+        );
+        return { provenTimestamp, uploadedTimestamp, provingTime, ...e };
+      }),
+    ),
+  );
+
+  log(`prover_id, blocks_proven_within_timeout, total_blocks_proven, avg_proving_time`);
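+  // A prover may submit more than one proof for the same L2 block, so count unique block numbers only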
+  for (const proverId in stats) {
+    const blocks = stats[proverId];
+    const withinTimeout = blocks.filter(b => b.provingTime <= provingTimeout);
+    const uniqueBlocksWithinTimeout = new Set(withinTimeout.map(e => e.l2BlockNumber));
+    const uniqueBlocks = new Set(blocks.map(e => e.l2BlockNumber));
+    const avgProvingTime =
+      blocks.length === 0 ? 0 : Math.ceil(Number(blocks.reduce((acc, b) => acc + b.provingTime, 0n)) / blocks.length);
+
+    log(`${proverId}, ${uniqueBlocksWithinTimeout.size}, ${uniqueBlocks.size}, ${avgProvingTime}`);
+  }
+  return;
+}
+
+async function getL2ProofVerifiedEvents(
+  startBlock: bigint,
+  lastBlockNum: bigint,
+  batchSize: bigint,
+  debugLog: Logger,
+  publicClient: PublicClient,
+  rollup: EthAddress,
+) {
   let blockNum = startBlock;
   const events = [];
   while (blockNum <= lastBlockNum) {
     const end = blockNum + batchSize > lastBlockNum + 1n ? lastBlockNum + 1n : blockNum + batchSize;
-    debugLog.verbose(`Querying events from block ${blockNum} to ${end}`);
     const newEvents = await retrieveL2ProofVerifiedEvents(publicClient, rollup, blockNum, end);
     events.push(...newEvents);
-    debugLog.verbose(`Got ${newEvents.length} events`);
+    debugLog.verbose(`Got ${newEvents.length} events querying l2 proof verified from block ${blockNum} to ${end}`);
     blockNum += batchSize;
   }
+  return events;
+}
 
-  const stats = groupBy(events, 'proverId');
-  for (const proverId in stats) {
-    log(`${proverId}, ${stats[proverId].length}`);
+async function getL2BlockEvents(
+  startBlock: bigint,
+  lastBlockNum: bigint,
+  batchSize: bigint,
+  debugLog: Logger,
+  publicClient: PublicClient,
+  rollup: EthAddress,
+) {
+  let blockNum = startBlock;
+  const events = [];
+  while (blockNum <= lastBlockNum) {
+    const end = blockNum + batchSize > lastBlockNum + 1n ? lastBlockNum + 1n : blockNum + batchSize;
+    const newEvents = await getL2BlockProposedLogs(publicClient, rollup, blockNum, end);
+    events.push(...newEvents);
+    debugLog.verbose(`Got ${newEvents.length} events querying l2 block submitted from block ${blockNum} to ${end}`);
+    blockNum += batchSize;
   }
+  return events;
 }
diff --git a/yarn-project/foundation/src/collection/object.ts b/yarn-project/foundation/src/collection/object.ts
index b55c3b8688f..9603daf4543 100644
--- a/yarn-project/foundation/src/collection/object.ts
+++ b/yarn-project/foundation/src/collection/object.ts
@@ -1,19 +1,19 @@
 /** Returns a new object with the same keys and where each value has been passed through the mapping function. */
 export function mapValues<K extends string | number | symbol, T, U>(
   obj: Record<K, T>,
-  fn: (value: T) => U,
+  fn: (value: T, key: K) => U,
 ): Record<K, U>;
 export function mapValues<K extends string | number | symbol, T, U>(
   obj: Partial<Record<K, T>>,
-  fn: (value: T) => U,
+  fn: (value: T, key: K) => U,
 ): Partial<Record<K, U>>;
 export function mapValues<K extends string | number | symbol, T, U>(
   obj: Record<K, T>,
-  fn: (value: T) => U,
+  fn: (value: T, key: K) => U,
 ): Record<K, U> {
   const result: Record<K, U> = {} as Record<K, U>;
   for (const key in obj) {
-    result[key] = fn(obj[key]);
+    result[key] = fn(obj[key], key);
   }
   return result;
 }
diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock
index 56cea10eb58..3c960d96f53 100644
--- a/yarn-project/yarn.lock
+++ b/yarn-project/yarn.lock
@@ -463,6 +463,7 @@ __metadata:
     "@jest/globals": ^29.5.0
     "@libp2p/peer-id-factory": ^3.0.4
    "@types/jest": ^29.5.0
+    "@types/lodash.chunk": ^4.2.9
     "@types/lodash.groupby": ^4.6.9
     "@types/lodash.startcase": ^4.4.7
     "@types/node": ^18.7.23
@@ -471,6 +472,7 @@ __metadata:
     commander: ^12.1.0
     jest: ^29.5.0
     jest-mock-extended: ^3.0.5
+    lodash.chunk: ^4.2.0
     lodash.groupby: ^4.6.0
     semver: ^7.5.4
     solc: ^0.8.26
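
A quick illustration of the updated mapValues overloads (a minimal
sketch with hypothetical values; the callback may now take the entry
key as a second argument, and existing single-argument callbacks
compile unchanged):

    import { mapValues } from '@aztec/foundation/collection';

    // Each value is mapped with access to its key, as prover_stats.ts
    // does when it groups proof events by proverId.
    const out = mapValues({ alice: 2, bob: 3 }, (count, name) => `${name}:${count}`);
    // => { alice: 'alice:2', bob: 'bob:3' }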