From dda6d41c98dc432ed095adcaca87c8268f9240f0 Mon Sep 17 00:00:00 2001 From: harkamal Date: Tue, 5 Sep 2023 18:39:25 +0530 Subject: [PATCH 1/8] client: some skeleton improvements from observations on devnet syncs --- packages/client/src/sync/skeleton.ts | 45 +++++++++++++++++++++------- 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/packages/client/src/sync/skeleton.ts b/packages/client/src/sync/skeleton.ts index b30caf9cff..b68739184d 100644 --- a/packages/client/src/sync/skeleton.ts +++ b/packages/client/src/sync/skeleton.ts @@ -324,13 +324,17 @@ export class Skeleton extends MetaDBManager { ) const reorg = await this.processNewHead(head, force) if (force && reorg) { - // It could just be a reorg at this head with previous tail preserved + // It could just be a reorg at this head with previous tail preserved unless + // 1. parent is not present in skeleton (it could be in chain for whatever reason) or + // 2. the parent < current skeleton tail + // then we need to reset the subchain itself const subchain = this.status.progress.subchains[0] - const parent = await this.getBlock(head.header.number - BigInt(1)) + const parent = await this.getBlock(head.header.number - BigInt(1), true) if ( subchain === undefined || parent === undefined || - !equalsBytes(parent.hash(), head.header.parentHash) + !equalsBytes(parent.hash(), head.header.parentHash) || + parent.header.number < subchain.tail ) { const s = { head: head.header.number, @@ -418,18 +422,33 @@ export class Skeleton extends MetaDBManager { continue } else { // Partially overwritten, trim the head to the overwritten size + this.status.progress.subchains[1].head = this.status.progress.subchains[0].tail - BigInt(1) this.config.logger.debug( - `Previous subchain partially overwritten head=${head} tail=${tail} next=${short(next)}` + `Previous subchain partially overwritten head=${head} tail=${tail} next=${short( + next + )} with newHead=${this.status.progress.subchains[1].head}` ) - this.status.progress.subchains[1].head = this.status.progress.subchains[0].tail - BigInt(1) edited = true } // If the old subchain is an extension of the new one, merge the two // and let the skeleton syncer restart (to clean internal state) - const subChain1Head = await this.getBlock(this.status.progress.subchains[1].head) + // subchains are useful is subChain1Head is in skeleton only and its tail correct + const subChain1Head = await this.getBlock(this.status.progress.subchains[1].head, true) + // tail lookup can be from skeleton or chain + const subChain1Tail = await this.getBlock(this.status.progress.subchains[1].tail) if ( - subChain1Head !== undefined && + subChain1Head === undefined || + subChain1Tail === undefined || + !equalsBytes(subChain1Tail.header.parentHash, this.status.progress.subchains[1].next) + ) { + // if subChain1Head is not in the skeleton that all previous subchains are not useful + // and better to junk + this.config.logger.debug( + `Removing all previous subchains as skeleton missing block at previous subchain head=${this.status.progress.subchains[1].head}` + ) + this.status.progress.subchains.splice(1, this.status.progress.subchains.length - 1) + } else if ( equalsBytes(subChain1Head.hash(), this.status.progress.subchains[0].next) === true ) { // only merge is we can integrate a big progress, as each merge leads @@ -565,13 +584,14 @@ export class Skeleton extends MetaDBManager { }) } - private async backStep(): Promise { + private async backStep(fromBlock: bigint): Promise { try { if 
(this.config.skeletonFillCanonicalBackStep <= 0) return null const { head, tail } = this.bounds() + // by default we try back stepping from tail or fromBlock whichever is bigger + let newTail: bigint | null = tail < fromBlock ? fromBlock : tail let tailBlock - let newTail: bigint | null = tail do { newTail = newTail + BigInt(this.config.skeletonFillCanonicalBackStep) tailBlock = await this.getBlock(newTail, true) @@ -597,6 +617,7 @@ export class Skeleton extends MetaDBManager { return null } } finally { + this.status.canonicalHeadReset = true this.status.linked = await this.checkLinked() } } @@ -646,7 +667,8 @@ export class Skeleton extends MetaDBManager { `fillCanonicalChain block number=${number} not found, backStepping` ) await this.runWithLock(async () => { - await this.backStep() + // backstep the subchain from the block that was not found + await this.backStep(number) }) break } @@ -661,6 +683,7 @@ export class Skeleton extends MetaDBManager { await this.chain.putBlocks([oldHead], true) } } + if (numBlocksInserted !== 1) { this.config.logger.error( `Failed to put block number=${number} fork=${block.common.hardfork()} hash=${short( @@ -694,7 +717,7 @@ export class Skeleton extends MetaDBManager { ) } await this.runWithLock(async () => { - await this.backStep() + await this.backStep(number) }) break } From 6047ba0c8f70dafff298a2b312bb1c7afe936324 Mon Sep 17 00:00:00 2001 From: harkamal Date: Tue, 5 Sep 2023 23:06:20 +0530 Subject: [PATCH 2/8] skip filling chain on duplicate sethead --- packages/client/src/sync/skeleton.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/client/src/sync/skeleton.ts b/packages/client/src/sync/skeleton.ts index b68739184d..3506c076a4 100644 --- a/packages/client/src/sync/skeleton.ts +++ b/packages/client/src/sync/skeleton.ts @@ -237,7 +237,7 @@ export class Skeleton extends MetaDBManager { const mayBeDupBlock = await this.getBlock(number) if (mayBeDupBlock !== undefined && equalsBytes(mayBeDupBlock.header.hash(), head.hash())) { this.config.logger.debug( - `Skeleton duplicate announcement tail=${lastchain.tail} head=${ + `Skeleton duplicate ${force ? 'setHead' : 'announcement'} tail=${lastchain.tail} head=${ lastchain.head } number=${number} hash=${short(head.hash())}` ) @@ -363,7 +363,12 @@ export class Skeleton extends MetaDBManager { if (force || init) { await this.writeSyncStatus() } - if (force && this.status.linked) { + if ( + (force && + this.status.linked && + head.header.number > this.status.progress.subchains[0]?.head) ?? 
+ BigInt(0) + ) { void this.fillCanonicalChain() } // Earlier we were throwing on reorg, essentially for the purposes for killing the reverse fetcher From 94b54aa89ae1ebb457e730da0fc196f1862a1e00 Mon Sep 17 00:00:00 2001 From: harkamal Date: Sat, 9 Sep 2023 14:41:47 +0530 Subject: [PATCH 3/8] apply nits --- packages/client/src/sync/skeleton.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/client/src/sync/skeleton.ts b/packages/client/src/sync/skeleton.ts index 3506c076a4..82e989b028 100644 --- a/packages/client/src/sync/skeleton.ts +++ b/packages/client/src/sync/skeleton.ts @@ -438,7 +438,7 @@ export class Skeleton extends MetaDBManager { // If the old subchain is an extension of the new one, merge the two // and let the skeleton syncer restart (to clean internal state) - // subchains are useful is subChain1Head is in skeleton only and its tail correct + // subchains are useful if subChain1Head is in skeleton only and its tail correct const subChain1Head = await this.getBlock(this.status.progress.subchains[1].head, true) // tail lookup can be from skeleton or chain const subChain1Tail = await this.getBlock(this.status.progress.subchains[1].tail) @@ -447,7 +447,7 @@ export class Skeleton extends MetaDBManager { subChain1Tail === undefined || !equalsBytes(subChain1Tail.header.parentHash, this.status.progress.subchains[1].next) ) { - // if subChain1Head is not in the skeleton that all previous subchains are not useful + // if subChain1Head is not in the skeleton then all previous subchains are not useful // and better to junk this.config.logger.debug( `Removing all previous subchains as skeleton missing block at previous subchain head=${this.status.progress.subchains[1].head}` From 1f5b246e0f7d6602503f8e749d808e0eac577fc3 Mon Sep 17 00:00:00 2001 From: harkamal Date: Tue, 12 Sep 2023 00:29:58 +0530 Subject: [PATCH 4/8] add beacon sync sim for incline client --- package-lock.json | 12 +- packages/block/src/index.ts | 1 + packages/client/package.json | 1 + packages/client/test/sim/beaconsync.md | 26 +++ packages/client/test/sim/beaconsync.spec.ts | 220 ++++++++++++++++++ packages/client/test/sim/configs/mainnet.json | 4 +- packages/client/test/sim/mainnet.spec.ts | 51 ++-- packages/client/test/sim/simutils.ts | 151 +++++++++++- packages/client/test/sim/single-run.sh | 9 +- packages/client/test/sim/snapsync.spec.ts | 5 +- 10 files changed, 451 insertions(+), 29 deletions(-) create mode 100644 packages/client/test/sim/beaconsync.md create mode 100644 packages/client/test/sim/beaconsync.spec.ts diff --git a/package-lock.json b/package-lock.json index 87d9461697..3b8eb2277a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7202,6 +7202,15 @@ "node": ">=0.8.x" } }, + "node_modules/eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", + "dev": true, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", @@ -15444,6 +15453,7 @@ "@types/yargs": "^17.0.24", "constants-browserify": "^1.0.0", "crypto-browserify": "^3.12.0", + "eventsource": "^2.0.2", "file-replace-loader": "^1.2.0", "it-pair": "^1.0.0", "it-pushable": "^1.4.2", @@ -15701,7 +15711,7 @@ "version": "5.0.0", "license": "MPL-2.0", "bin": { - "rlp": "bin/rlp" + "rlp": "bin/rlp.cjs" }, "engines": { 
"node": ">=18" diff --git a/packages/block/src/index.ts b/packages/block/src/index.ts index add7d33700..b6b4faa482 100644 --- a/packages/block/src/index.ts +++ b/packages/block/src/index.ts @@ -1,4 +1,5 @@ export { Block } from './block.js' +export { executionPayloadFromBeaconPayload } from './from-beacon-payload.js' export { BlockHeader } from './header.js' export { getDifficulty, valuesArrayToHeaderData } from './helpers.js' export * from './types.js' diff --git a/packages/client/package.json b/packages/client/package.json index 6e7ba92045..783067d754 100644 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -98,6 +98,7 @@ "@types/yargs": "^17.0.24", "constants-browserify": "^1.0.0", "crypto-browserify": "^3.12.0", + "eventsource": "^2.0.2", "file-replace-loader": "^1.2.0", "it-pair": "^1.0.0", "it-pushable": "^1.4.2", diff --git a/packages/client/test/sim/beaconsync.md b/packages/client/test/sim/beaconsync.md new file mode 100644 index 0000000000..5350010e1b --- /dev/null +++ b/packages/client/test/sim/beaconsync.md @@ -0,0 +1,26 @@ +### Beaconsync sim setup + +## Prerequisites + +1. Bash terminal +2. Docker (without sudo) +3. `jq` & `curl` installed +4. `ethereumjs-monorepo` codebase build via `npm i` (for e.g. at `/usr/app/ethereumjs`) + +You may pre-download docker images for lodestar (`docker pull chainsafe/lodestar:latest`) and geth (`docker pull ethereum/client-go:v1.11.6`) to avoid any test timeout issues. + +Note: All commands should be run from the `client` package directory root (so something like `/usr/app/ethereumjs/packages/client`) + +## How to run + +1. Cleanup some datadirs + +```bash +rm -rf ./datadir (if you have had previous runs) +``` + +2. Run the sim + +```bash +BEACON_SYNC=true NETWORK=mainnet NETWORKID=1337903 ELCLIENT=geth npx vitest run test/sim/beaconsync.spec.ts +``` \ No newline at end of file diff --git a/packages/client/test/sim/beaconsync.spec.ts b/packages/client/test/sim/beaconsync.spec.ts new file mode 100644 index 0000000000..adc556a52f --- /dev/null +++ b/packages/client/test/sim/beaconsync.spec.ts @@ -0,0 +1,220 @@ +import { Common } from '@ethereumjs/common' +import { bytesToHex, hexToBytes, parseGethGenesisState, privateToAddress } from '@ethereumjs/util' +import debug from 'debug' +import { Client } from 'jayson/promise' +import { assert, describe, it } from 'vitest' + +import { Config } from '../../src/config.js' +import { getLogger } from '../../src/logging.js' +import { Event } from '../../src/types.js' + +import { + createInlineClient, + filterKeywords, + filterOutWords, + runTxHelper, + setupEngineUpdateRelay, + startNetwork, + waitForELStart, +} from './simutils' + +import type { EthereumClient } from '../../src/client' +import type { RlpxServer } from '../../src/net/server' + +const client = Client.http({ port: 8545 }) + +const network = 'mainnet' +const networkJson = require(`./configs/${network}.json`) +const common = Common.fromGethGenesis(networkJson, { chain: network }) +const customGenesisState = parseGethGenesisState(networkJson) + +const pkey = hexToBytes('0xae557af4ceefda559c924516cabf029bedc36b68109bf8d6183fe96e04121f4e') +// 0x97C9B168C5E14d5D369B6D88E9776E5B7b11dcC1 +const sender = bytesToHex(privateToAddress(pkey)) + +let ejsClient: EthereumClient | null = null +let beaconSyncRelayer: any = null + +// This account doesn't exist in the genesis so starting balance is zero +const EOATransferToAccount = '0x3dA33B9A0894b908DdBb00d96399e506515A1009' +let EOATransferToBalance = BigInt(0) + +export async function 
runTx(data: string, to?: string, value?: bigint) { + return runTxHelper({ client, common, sender, pkey }, data, to, value) +} + +describe('simple mainnet test run', async () => { + if (process.env.EXTRA_CL_PARAMS === undefined) { + process.env.EXTRA_CL_PARAMS = `--params.CAPELLA_FORK_EPOCH 0` + process.env.GENESIS_DELAY = 5 + } + + // Better add it as a option in startnetwork + process.env.NETWORKID = `${common.networkId()}` + const { teardownCallBack, result } = await startNetwork(network, client, { + filterKeywords, + filterOutWords, + externalRun: process.env.EXTERNAL_RUN, + withPeer: process.env.WITH_PEER, + }) + + if (result.includes('Geth')) { + assert.ok(true, 'connected to Geth') + } else { + assert.fail('connected to wrong client') + } + + const nodeInfo = (await client.request('admin_nodeInfo', [])).result + assert.ok(nodeInfo.enode !== undefined, 'fetched enode for peering') + + console.log(`Waiting for network to start...`) + try { + await waitForELStart(client) + assert.ok(true, 'geth<>lodestar started successfully') + } catch (e) { + assert.fail('geth<>lodestar failed to start') + throw e + } + + // ------------Sanity checks-------------------------------- + it.skipIf(process.env.ADD_EOA_STATE === undefined)( + 'add some EOA transfers', + async () => { + let balance = await client.request('eth_getBalance', [EOATransferToAccount, 'latest']) + assert.equal( + EOATransferToBalance, + BigInt(balance.result), + `fetched ${EOATransferToAccount} balance=${EOATransferToBalance}` + ) + balance = await client.request('eth_getBalance', [EOATransferToAccount, 'latest']) + + await runTx('', EOATransferToAccount, 1000000n) + EOATransferToBalance += 1000000n + + balance = await client.request('eth_getBalance', [EOATransferToAccount, 'latest']) + assert.equal(BigInt(balance.result), EOATransferToBalance, 'sent a simple ETH transfer') + await runTx('', EOATransferToAccount, 1000000n) + EOATransferToBalance += 1000000n + + balance = await client.request('eth_getBalance', [EOATransferToAccount, 'latest']) + assert.equal(BigInt(balance.result), EOATransferToBalance, 'sent a simple ETH transfer 2x') + + balance = await client.request('eth_getBalance', [sender, 'latest']) + assert.ok( + balance.result !== undefined, + 'remaining sender balance after transfers and gas fee' + ) + }, + 2 * 60_000 + ) + + it.skipIf(process.env.BEACON_SYNC === undefined)( + 'setup beacon sync', + async () => { + // start client inline here for beacon sync + const peerBeaconUrl = 'http://127.0.0.1:9596' + const { + ejsInlineClient, + peerConnectedPromise, + beaconSyncRelayer: relayer, + // eslint-disable-next-line @typescript-eslint/no-use-before-define + } = (await createBeaconSyncClient( + common, + customGenesisState, + [nodeInfo.enode], + peerBeaconUrl + ).catch((e) => { + console.log(e) + return null + })) ?? 
{ + ejsInlineClient: null, + peerConnectedPromise: Promise.reject('Client creation error'), + } + ejsClient = ejsInlineClient + beaconSyncRelayer = relayer + assert.ok(ejsClient !== null, 'ethereumjs client started') + + const enode = (ejsClient!.server('rlpx') as RlpxServer)!.getRlpxInfo().enode + const res = await client.request('admin_addPeer', [enode]) + assert.equal(res.result, true, 'successfully requested Geth add EthereumJS as peer') + + const peerConnectTimeout = new Promise((_resolve, reject) => setTimeout(reject, 10000)) + try { + await Promise.race([peerConnectedPromise, peerConnectTimeout]) + assert.ok(true, 'connected to geth peer') + } catch (e) { + assert.fail('could not connect to geth peer in 10 seconds') + } + }, + 60_000 + ) + + it.skipIf(process.env.BEACON_SYNC === undefined)( + 'should beacon sync and finish', + async () => { + if (ejsClient !== null && beaconSyncRelayer !== null) { + // wait on the sync promise to complete if it has been called independently + const syncTimeout = new Promise((_resolve, reject) => setTimeout(reject, 8 * 60_000)) + const beaconSyncPromise = beaconSyncRelayer.start() + + try { + // call sync if not has been called yet + void ejsClient.services[0].synchronizer?.sync() + await Promise.race([beaconSyncPromise, syncTimeout]) + assert(beaconSyncRelayer.status, 'SYNCED', 'beaconSyncRelayer should have synced client') + await ejsClient.stop() + assert.ok(true, 'completed beacon sync') + } catch (e) { + assert.fail('could not complete beacon sync in 8 minutes') + } + } else { + assert.fail('ethereumjs client not setup properly for snap sync') + } + }, + 10 * 60_000 + ) + + it('network cleanup', async () => { + try { + await teardownCallBack() + assert.ok(true, 'network cleaned') + } catch (e) { + assert.fail('network not cleaned properly') + } + }, 60_000) +}) + +async function createBeaconSyncClient( + common: any, + customGenesisState: any, + bootnodes: any, + peerBeaconUrl: any, + datadir: any +) { + // Turn on `debug` logs, defaults to all client logging + debug.enable(process.env.DEBUG_SNAP ?? 
'') + const logger = getLogger({ logLevel: 'debug' }) + const config = new Config({ + common, + transports: ['rlpx'], + bootnodes, + multiaddrs: [], + logger, + discDns: false, + discV4: false, + port: 30304, + maxFetcherJobs: 10, + }) + const peerConnectedPromise = new Promise((resolve) => { + config.events.once(Event.PEER_CONNECTED, (peer: any) => resolve(peer)) + }) + + const ejsInlineClient = await createInlineClient(config, common, customGenesisState, datadir) + const beaconSyncRelayer = await setupEngineUpdateRelay(ejsInlineClient, peerBeaconUrl) + return { ejsInlineClient, peerConnectedPromise, beaconSyncRelayer } +} + +process.on('uncaughtException', (err, origin) => { + console.log({ err, origin }) + process.exit() +}) diff --git a/packages/client/test/sim/configs/mainnet.json b/packages/client/test/sim/configs/mainnet.json index 4aaa0dbded..321a864e18 100644 --- a/packages/client/test/sim/configs/mainnet.json +++ b/packages/client/test/sim/configs/mainnet.json @@ -12,7 +12,9 @@ "berlinBlock": 0, "londonBlock": 0, "mergeForkBlock": 0, - "terminalTotalDifficulty": 1 + "shanghaiTime": 0, + "terminalTotalDifficulty": 1, + "terminalTotalDifficultyPassed": true }, "alloc": { "0x0000000000000000000000000000000000000000": { diff --git a/packages/client/test/sim/mainnet.spec.ts b/packages/client/test/sim/mainnet.spec.ts index 90f00ee532..953fe85b29 100644 --- a/packages/client/test/sim/mainnet.spec.ts +++ b/packages/client/test/sim/mainnet.spec.ts @@ -25,6 +25,9 @@ export async function runTx(data: string, to?: string, value?: bigint) { } describe('simple mainnet test run', async () => { + if (process.env.EXTRA_CL_PARAMS === undefined) { + process.env.EXTRA_CL_PARAMS = '--params.CAPELLA_FORK_EPOCH 0' + } const { teardownCallBack, result } = await startNetwork(network, client, { filterKeywords, filterOutWords, @@ -48,26 +51,30 @@ describe('simple mainnet test run', async () => { const blockHashes: string[] = [] // ------------Sanity checks-------------------------------- - it('Simple transfer - sanity check', async () => { - await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) - let balance = await client.request('eth_getBalance', [ - '0x3dA33B9A0894b908DdBb00d96399e506515A1009', - 'latest', - ]) - assert.equal(BigInt(balance.result), 1000000n, 'sent a simple ETH transfer') - await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) - balance = await client.request('eth_getBalance', [ - '0x3dA33B9A0894b908DdBb00d96399e506515A1009', - 'latest', - ]) - balance = await client.request('eth_getBalance', [ - '0x3dA33B9A0894b908DdBb00d96399e506515A1009', - 'latest', - ]) - assert.equal(BigInt(balance.result), 2000000n, 'sent a simple ETH transfer 2x') - const latestBlock = await client.request('eth_getBlockByNumber', ['latest', false]) - blockHashes.push(latestBlock.result.hash) - }) + it( + 'Simple transfer - sanity check', + async () => { + await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) + let balance = await client.request('eth_getBalance', [ + '0x3dA33B9A0894b908DdBb00d96399e506515A1009', + 'latest', + ]) + assert.equal(BigInt(balance.result), 1000000n, 'sent a simple ETH transfer') + await runTx('', '0x3dA33B9A0894b908DdBb00d96399e506515A1009', 1000000n) + balance = await client.request('eth_getBalance', [ + '0x3dA33B9A0894b908DdBb00d96399e506515A1009', + 'latest', + ]) + balance = await client.request('eth_getBalance', [ + '0x3dA33B9A0894b908DdBb00d96399e506515A1009', + 'latest', + ]) + assert.equal(BigInt(balance.result), 2000000n, 
'sent a simple ETH transfer 2x') + const latestBlock = await client.request('eth_getBlockByNumber', ['latest', false]) + blockHashes.push(latestBlock.result.hash) + }, + 2 * 60_000 + ) it('Validate execution hashes present in beacon headers', async () => { const eth2res = await (await fetch('http://127.0.0.1:9596/eth/v1/beacon/headers')).json() @@ -77,7 +84,7 @@ describe('simple mainnet test run', async () => { parseInt(eth2res.data[0].header.message.slot), blockHashes ) - }) + }, 60_000) it('should reset td', async () => { try { @@ -86,5 +93,5 @@ describe('simple mainnet test run', async () => { } catch (e) { assert.fail('network not cleaned properly') } - }) + }, 60_000) }) diff --git a/packages/client/test/sim/simutils.ts b/packages/client/test/sim/simutils.ts index e629279886..c9e6f0d7e4 100644 --- a/packages/client/test/sim/simutils.ts +++ b/packages/client/test/sim/simutils.ts @@ -1,3 +1,4 @@ +import { executionPayloadFromBeaconPayload } from '@ethereumjs/block' import { Blockchain } from '@ethereumjs/blockchain' import { BlobEIP4844Transaction, FeeMarketEIP1559Transaction } from '@ethereumjs/tx' import { @@ -16,10 +17,12 @@ import * as fs from 'fs/promises' import { Level } from 'level' import { execSync, spawn } from 'node:child_process' import * as net from 'node:net' +import qs from 'qs' import { EthereumClient } from '../../src/client' import { Config } from '../../src/config' import { LevelDB } from '../../src/execution/level' +import { RPCManager } from '../../src/rpc' import type { Common } from '@ethereumjs/common' import type { TransactionType, TxData, TxOptions } from '@ethereumjs/tx' @@ -27,6 +30,24 @@ import type { ChildProcessWithoutNullStreams } from 'child_process' import type { Client } from 'jayson/promise' export const sleep = (ms: number) => new Promise((r) => setTimeout(r, ms)) +// This function switches between the native web implementation and a nodejs implemnetation +export async function getEventSource(): Promise { + // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions + if (globalThis.EventSource) { + return EventSource + } else { + return (await import('eventsource')).default as unknown as typeof EventSource + } +} + +/** + * Ethereum Beacon API requires the query with format: + * - arrayFormat: repeat `topic=topic1&topic=topic2` + */ +export function stringifyQuery(query: unknown): string { + return qs.stringify(query, { arrayFormat: 'repeat' }) +} + // Initialize the kzg object with the kzg library initKZG(kzg, __dirname + '/../../src/trustedSetups/devnet6.txt') @@ -407,9 +428,13 @@ export const runBlobTxsFromFile = async (client: Client, path: string) => { return txnHashes } -export async function createInlineClient(config: any, common: any, customGenesisState: any) { +export async function createInlineClient( + config: any, + common: any, + customGenesisState: any, + datadir: any = Config.DATADIR_DEFAULT +) { config.events.setMaxListeners(50) - const datadir = Config.DATADIR_DEFAULT const chainDB = new Level( `${datadir}/${common.chainName()}/chainDB` ) @@ -442,6 +467,128 @@ export async function createInlineClient(config: any, common: any, customGenesis return inlineClient } +export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconUrl: string) { + // track head + const topics = ['head'] + const EventSource = await getEventSource() + const query = stringifyQuery({ topics }) + console.log({ query }) + const eventSource = new EventSource(`${peerBeaconUrl}/eth/v1/events?${query}`) + const manager = new 
RPCManager(client, client.config) + const engineMethods = manager.getMethods(true) + console.log('engineMethods', Object.keys(engineMethods)) + + // possible values: STARTED, PAUSED, ERRORED, SYNCING, VALID + let syncState = 'PAUSED' + let errorMessage = '' + const updateState = (newState) => { + if (syncState !== 'PAUSED') { + syncState = newState + } + } + + const playUpdate = async (payload, finalizedBlockHash, version) => { + if (version !== 'capella') { + throw Error('only capella replay supported yet') + } + console.log('playUpdate', payload, { finalizedBlockHash }) + + try { + const newPayloadRes = await engineMethods['engine_newPayloadV2']([payload]) + if ( + newPayloadRes.status === undefined || + !['SYNCING', 'VALID', 'ACCEPTED'].includes(newPayloadRes.status) + ) { + throw Error( + `newPayload error: status${newPayloadRes.status} validationError=${newPayloadRes.validationError} error=${newPayloadRes.error}` + ) + } + + const fcUState = { + headBlockHash: payload.blockHash, + safeBlockHash: finalizedBlockHash, + finalizedBlockHash, + } + console.log({ fcUState }) + const fcuRes = await engineMethods['engine_forkchoiceUpdatedV2']([fcUState]) + if ( + fcuRes.payloadStatus === undefined || + !['SYNCING', 'VALID', 'ACCEPTED'].includes(newPayloadRes.status) + ) { + throw Error(`fcU error: error:${fcuRes.error} message=${fcuRes.message}`) + } else { + updateState(fcuRes.payloadStatus.status) + } + } catch (e) { + console.log('playUpdate error', e) + updateState('ERRORED') + } + } + + // ignoring the actual even, just using it as trigger to feed + eventSource.addEventListener(topics[0], (async (_event: MessageEvent) => { + if (syncState === 'PAUSED') return + try { + // just fetch finalized updated, it has all relevant hashesh to fcU + const beaconFinalized = await ( + await fetch(`${peerBeaconUrl}/eth/v1/beacon/light_client/finality_update`) + ).json() + console.log({ beaconFinalized: beaconFinalized.data?.finalized_header }) + if (beaconFinalized.error !== undefined) { + if (beaconFinalized.message?.includes('No finality update available') === true) { + // waiting for finality + return + } else { + throw Error(beaconFinalized.message ?? 
'finality update fetch error') + } + } + + const beaconHead = await (await fetch(`${peerBeaconUrl}/eth/v2/beacon/blocks/head`)).json() + + const payload = executionPayloadFromBeaconPayload( + beaconHead.data.message.body.execution_payload + ) + console.log('beaconFinalized', beaconFinalized.data.finalized_header) + const finalizedBlockHash = beaconFinalized.data.finalized_header.execution.block_hash + console.log('playing update', { payload, finalizedBlockHash }) + + await playUpdate(payload, finalizedBlockHash, beaconHead.version) + } catch (e) { + console.log('errored -----', e) + updateState('ERRORED') + errorMessage = (e as Error).message + } + }) as EventListener) + + const start = () => { + if (syncState === 'PAUSED') syncState = 'STARTED' + return new Promise((resolve, reject) => { + const resolveOnSynced = () => { + console.log('resolveOnSynced', { syncState }) + if (syncState === 'VALID') { + resolve({ syncState }) + } else if (syncState === 'INVALID') { + reject(Error(errorMessage)) + } + + client.config.events.removeListener(resolveOnSynced) + } + client.config.events.on(Event.CHAIN_UPDATED, resolveOnSynced) + }) + } + const pause = () => { + syncState = 'PAUSED' + } + + return { + syncState, + playUpdate, + eventSource, + start, + pause, + } +} + // To minimise noise on the spec run, selective filteration is applied to let the important events // of the testnet log to show up in the spec log export const filterKeywords = [ diff --git a/packages/client/test/sim/single-run.sh b/packages/client/test/sim/single-run.sh index cee4fa2c2c..734f4da034 100755 --- a/packages/client/test/sim/single-run.sh +++ b/packages/client/test/sim/single-run.sh @@ -39,7 +39,7 @@ then echo "geth requires NETWORKID to be passed in env, exiting..." exit; fi; - ELCLIENT_IMAGE="ethereum/client-go:v1.11.6" + ELCLIENT_IMAGE="ethereum/client-go:v1.12.2" echo "ELCLIENT=$ELCLIENT using ELCLIENT_IMAGE=$ELCLIENT_IMAGE NETWORKID=$NETWORKID" ;; *) @@ -233,8 +233,13 @@ then done; fi + if [ ! 
-n "$GENESIS_DELAY" ] + then + GENESIS_DELAY=30 + fi; + genTime="$(date +%s)" - genTime=$((genTime + 30)) + genTime=$((genTime + $GENESIS_DELAY)) echo $genTime > "$origDataDir/genesisTime" echo $GENESIS_HASH > "$origDataDir/genesisHash" else diff --git a/packages/client/test/sim/snapsync.spec.ts b/packages/client/test/sim/snapsync.spec.ts index 6459e51c69..7ebc338b5b 100644 --- a/packages/client/test/sim/snapsync.spec.ts +++ b/packages/client/test/sim/snapsync.spec.ts @@ -53,6 +53,9 @@ export async function runTx(data: string, to?: string, value?: bigint) { } describe('simple mainnet test run', async () => { + if (process.env.EXTRA_CL_PARAMS === undefined) { + process.env.EXTRA_CL_PARAMS = '--params.CAPELLA_FORK_EPOCH 0' + } // Better add it as a option in startnetwork process.env.NETWORKID = `${common.networkId()}` const { teardownCallBack, result } = await startNetwork(network, client, { @@ -145,7 +148,7 @@ describe('simple mainnet test run', async () => { 60_000 ) - it( + it.skipIf(process.env.SNAP_SYNC === undefined)( 'should snap sync and finish', async () => { if (ejsClient !== null && snapCompleted !== undefined) { From a2abdf7f087454d5c6a92da12c7a9de2ec617dab Mon Sep 17 00:00:00 2001 From: harkamal Date: Wed, 13 Sep 2023 14:37:34 +0530 Subject: [PATCH 5/8] get the beacon sync to resolve --- packages/client/test/sim/beaconsync.md | 9 ++++-- packages/client/test/sim/beaconsync.spec.ts | 5 ++-- packages/client/test/sim/simutils.ts | 32 +++++++++++---------- 3 files changed, 27 insertions(+), 19 deletions(-) diff --git a/packages/client/test/sim/beaconsync.md b/packages/client/test/sim/beaconsync.md index 5350010e1b..b0782c7c66 100644 --- a/packages/client/test/sim/beaconsync.md +++ b/packages/client/test/sim/beaconsync.md @@ -13,14 +13,19 @@ Note: All commands should be run from the `client` package directory root (so so ## How to run -1. Cleanup some datadirs +1. Cleanup some datadirs (if you have had previous runs) ```bash -rm -rf ./datadir (if you have had previous runs) +rm -rf ./datadir ``` 2. 
Run the sim ```bash BEACON_SYNC=true NETWORK=mainnet NETWORKID=1337903 ELCLIENT=geth npx vitest run test/sim/beaconsync.spec.ts +``` + +or just +```bash +rm -rf ./datadir; DEBUG=ethjs,client:* BEACON_SYNC=true NETWORK=mainnet NETWORKID=1337903 ELCLIENT=geth npx vitest run test/sim/beaconsync.spec.ts ``` \ No newline at end of file diff --git a/packages/client/test/sim/beaconsync.spec.ts b/packages/client/test/sim/beaconsync.spec.ts index adc556a52f..07bde66538 100644 --- a/packages/client/test/sim/beaconsync.spec.ts +++ b/packages/client/test/sim/beaconsync.spec.ts @@ -143,6 +143,7 @@ describe('simple mainnet test run', async () => { await Promise.race([peerConnectedPromise, peerConnectTimeout]) assert.ok(true, 'connected to geth peer') } catch (e) { + console.log(e) assert.fail('could not connect to geth peer in 10 seconds') } }, @@ -160,8 +161,8 @@ describe('simple mainnet test run', async () => { try { // call sync if not has been called yet void ejsClient.services[0].synchronizer?.sync() - await Promise.race([beaconSyncPromise, syncTimeout]) - assert(beaconSyncRelayer.status, 'SYNCED', 'beaconSyncRelayer should have synced client') + const syncResponse = await Promise.race([beaconSyncPromise, syncTimeout]) + assert(syncResponse.syncState, 'SYNCED', 'beaconSyncRelayer should have synced client') await ejsClient.stop() assert.ok(true, 'completed beacon sync') } catch (e) { diff --git a/packages/client/test/sim/simutils.ts b/packages/client/test/sim/simutils.ts index c9e6f0d7e4..33f6bd4960 100644 --- a/packages/client/test/sim/simutils.ts +++ b/packages/client/test/sim/simutils.ts @@ -476,10 +476,10 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU const eventSource = new EventSource(`${peerBeaconUrl}/eth/v1/events?${query}`) const manager = new RPCManager(client, client.config) const engineMethods = manager.getMethods(true) - console.log('engineMethods', Object.keys(engineMethods)) // possible values: STARTED, PAUSED, ERRORED, SYNCING, VALID let syncState = 'PAUSED' + let pollInterval = null let errorMessage = '' const updateState = (newState) => { if (syncState !== 'PAUSED') { @@ -491,7 +491,12 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU if (version !== 'capella') { throw Error('only capella replay supported yet') } - console.log('playUpdate', payload, { finalizedBlockHash }) + const fcUState = { + headBlockHash: payload.blockHash, + safeBlockHash: finalizedBlockHash, + finalizedBlockHash, + } + console.log('playUpdate', fcUState) try { const newPayloadRes = await engineMethods['engine_newPayloadV2']([payload]) @@ -504,12 +509,6 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU ) } - const fcUState = { - headBlockHash: payload.blockHash, - safeBlockHash: finalizedBlockHash, - finalizedBlockHash, - } - console.log({ fcUState }) const fcuRes = await engineMethods['engine_forkchoiceUpdatedV2']([fcUState]) if ( fcuRes.payloadStatus === undefined || @@ -533,7 +532,6 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU const beaconFinalized = await ( await fetch(`${peerBeaconUrl}/eth/v1/beacon/light_client/finality_update`) ).json() - console.log({ beaconFinalized: beaconFinalized.data?.finalized_header }) if (beaconFinalized.error !== undefined) { if (beaconFinalized.message?.includes('No finality update available') === true) { // waiting for finality @@ -548,31 +546,35 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU 
const payload = executionPayloadFromBeaconPayload( beaconHead.data.message.body.execution_payload ) - console.log('beaconFinalized', beaconFinalized.data.finalized_header) const finalizedBlockHash = beaconFinalized.data.finalized_header.execution.block_hash - console.log('playing update', { payload, finalizedBlockHash }) await playUpdate(payload, finalizedBlockHash, beaconHead.version) } catch (e) { - console.log('errored -----', e) + console.log('update fetch error', e) updateState('ERRORED') errorMessage = (e as Error).message } }) as EventListener) const start = () => { + if (pollInterval !== null) { + throw Error('Already waiting on sync') + } if (syncState === 'PAUSED') syncState = 'STARTED' return new Promise((resolve, reject) => { const resolveOnSynced = () => { - console.log('resolveOnSynced', { syncState }) + console.log('resolve sync', { syncState }) if (syncState === 'VALID') { resolve({ syncState }) + pollInterval = null } else if (syncState === 'INVALID') { + console.log('rejected sync', { syncState }) reject(Error(errorMessage)) + pollInterval = null } - - client.config.events.removeListener(resolveOnSynced) } + + pollInterval = setInterval(resolveOnSynced, 6000) client.config.events.on(Event.CHAIN_UPDATED, resolveOnSynced) }) } From 046110ca454b06c3bc1c698af39855b755afb658 Mon Sep 17 00:00:00 2001 From: harkamal Date: Wed, 13 Sep 2023 17:35:39 +0530 Subject: [PATCH 6/8] fix breaking skeleton spec --- packages/client/src/sync/skeleton.ts | 10 ++++------ packages/client/test/sim/simutils.ts | 2 +- packages/client/test/sync/skeleton.spec.ts | 15 ++++++--------- 3 files changed, 11 insertions(+), 16 deletions(-) diff --git a/packages/client/src/sync/skeleton.ts b/packages/client/src/sync/skeleton.ts index 82e989b028..58adfbbfff 100644 --- a/packages/client/src/sync/skeleton.ts +++ b/packages/client/src/sync/skeleton.ts @@ -322,7 +322,10 @@ export class Skeleton extends MetaDBManager { head.hash() )} force=${force}` ) + + const subchain0Head = this.status.progress.subchains[0]?.head ?? BigInt(0) const reorg = await this.processNewHead(head, force) + if (force && reorg) { // It could just be a reorg at this head with previous tail preserved unless // 1. parent is not present in skeleton (it could be in chain for whatever reason) or @@ -363,12 +366,7 @@ export class Skeleton extends MetaDBManager { if (force || init) { await this.writeSyncStatus() } - if ( - (force && - this.status.linked && - head.header.number > this.status.progress.subchains[0]?.head) ?? 
- BigInt(0) - ) { + if (force && this.status.linked && head.header.number > subchain0Head) { void this.fillCanonicalChain() } // Earlier we were throwing on reorg, essentially for the purposes for killing the reverse fetcher diff --git a/packages/client/test/sim/simutils.ts b/packages/client/test/sim/simutils.ts index 33f6bd4960..7b229378cd 100644 --- a/packages/client/test/sim/simutils.ts +++ b/packages/client/test/sim/simutils.ts @@ -563,7 +563,7 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU if (syncState === 'PAUSED') syncState = 'STARTED' return new Promise((resolve, reject) => { const resolveOnSynced = () => { - console.log('resolve sync', { syncState }) + console.log('resolve sync check', { syncState }) if (syncState === 'VALID') { resolve({ syncState }) pollInterval = null diff --git a/packages/client/test/sync/skeleton.spec.ts b/packages/client/test/sync/skeleton.spec.ts index 857cb7e938..a1c07ece63 100644 --- a/packages/client/test/sync/skeleton.spec.ts +++ b/packages/client/test/sync/skeleton.spec.ts @@ -178,14 +178,11 @@ describe('[Skeleton] / initSync', async () => { // header. We expect the old subchain to be truncated and a new chain // be created for the dangling head. { - name: 'The old subchain to be truncated and a new chain be created for the dangling head', + name: 'The old subchains to be truncated/cleared and a new chain be created for the dangling head', blocks: [block49B], oldState: [{ head: BigInt(100), tail: BigInt(5) }], head: block50, - newState: [ - { head: BigInt(50), tail: BigInt(50) }, - { head: BigInt(49), tail: BigInt(5) }, - ], + newState: [{ head: BigInt(50), tail: BigInt(50) }], }, ] for (const [testCaseIndex, testCase] of testCases.entries()) { @@ -193,7 +190,7 @@ describe('[Skeleton] / initSync', async () => { const config = new Config({ common, transports: [], - logger: getLogger({ loglevel: 'debug' }), + logger: getLogger({ logLevel: 'debug' }), accountCache: 10000, storageCache: 1000, }) @@ -309,7 +306,7 @@ describe('[Skeleton] / setHead', async () => { const config = new Config({ common, transports: [], - logger: getLogger({ loglevel: 'debug' }), + logger: getLogger({ logLevel: 'debug' }), accountCache: 10000, storageCache: 1000, }) @@ -493,7 +490,7 @@ describe('[Skeleton] / setHead', async () => { }) it('should fill the canonical chain after being linked to genesis', async () => { - const config = new Config({ common, transports: [] }) + const config = new Config({ common, transports: [], logger: getLogger({ logLevel: 'debug' }) }) const chain = await Chain.create({ config }) ;(chain.blockchain as any)._validateBlocks = false const skeleton = new Skeleton({ chain, config, metaDB: new MemoryLevel() }) @@ -819,7 +816,7 @@ describe('[Skeleton] / setHead', async () => { const config = new Config({ transports: [], common, - logger: getLogger({ loglevel: 'debug' }), + logger: getLogger({ logLevel: 'debug' }), accountCache: 10000, storageCache: 1000, }) From 315d3b08c69e2440c203a8ccf44c7790f1de6fb2 Mon Sep 17 00:00:00 2001 From: harkamal Date: Wed, 13 Sep 2023 18:18:28 +0530 Subject: [PATCH 7/8] fix reverse block fetcher --- packages/client/src/sync/skeleton.ts | 2 +- .../sync/fetcher/reverseblockfetcher.spec.ts | 17 ++++++++++++++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/packages/client/src/sync/skeleton.ts b/packages/client/src/sync/skeleton.ts index 58adfbbfff..9cb69e5863 100644 --- a/packages/client/src/sync/skeleton.ts +++ b/packages/client/src/sync/skeleton.ts @@ -448,7 +448,7 @@ 
export class Skeleton extends MetaDBManager { // if subChain1Head is not in the skeleton then all previous subchains are not useful // and better to junk this.config.logger.debug( - `Removing all previous subchains as skeleton missing block at previous subchain head=${this.status.progress.subchains[1].head}` + `Removing all previous subchains as skeleton missing block at previous subchain head=${this.status.progress.subchains[1].head} or its tail=${this.status.progress.subchains[1].tail}` ) this.status.progress.subchains.splice(1, this.status.progress.subchains.length - 1) } else if ( diff --git a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts index de345249b8..05cd12de2d 100644 --- a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts @@ -5,6 +5,7 @@ import { assert, describe, it } from 'vitest' import { Chain } from '../../../src/blockchain/chain' import { Config } from '../../../src/config' +import { getLogger } from '../../../src/logging' import { Skeleton } from '../../../src/sync' import { Event } from '../../../src/types' import { wait } from '../../integration/util' @@ -221,6 +222,7 @@ describe('[ReverseBlockFetcher]', async () => { accountCache: 10000, storageCache: 1000, skeletonSubchainMergeMinimum: 0, + logger: getLogger({ logLevel: 'debug' }), }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -251,11 +253,24 @@ describe('[ReverseBlockFetcher]', async () => { }, { setHardfork: true } ) + const block4 = Block.fromBlockData( + { + header: { number: BigInt(4), difficulty: BigInt(1) }, + }, + { setHardfork: true } + ) + const block5 = Block.fromBlockData( + { + header: { number: BigInt(5), difficulty: BigInt(1), parentHash: block4.hash() }, + }, + { setHardfork: true } + ) ;(skeleton as any).status.progress.subchains = [ { head: BigInt(100), tail: BigInt(50), next: block49.hash() }, - { head: BigInt(48), tail: BigInt(5) }, + { head: BigInt(48), tail: BigInt(5), next: block4.hash() }, ] await (skeleton as any).putBlock(block47) + await (skeleton as any).putBlock(block5) await fetcher.store([block49, block48]) assert.ok( (skeleton as any).status.progress.subchains.length === 1, From e6457eb0c85c10a6e7f02f7d4648344d278bf18c Mon Sep 17 00:00:00 2001 From: harkamal Date: Wed, 13 Sep 2023 20:06:27 +0530 Subject: [PATCH 8/8] apply feeback --- packages/client/test/sim/beaconsync.md | 4 ++-- packages/client/test/sim/beaconsync.spec.ts | 4 ++-- packages/client/test/sim/simutils.ts | 7 +++---- packages/client/test/sim/snapsync.md | 2 +- 4 files changed, 8 insertions(+), 9 deletions(-) diff --git a/packages/client/test/sim/beaconsync.md b/packages/client/test/sim/beaconsync.md index b0782c7c66..cdaaaadc99 100644 --- a/packages/client/test/sim/beaconsync.md +++ b/packages/client/test/sim/beaconsync.md @@ -2,12 +2,12 @@ ## Prerequisites -1. Bash terminal +1. ZSH terminal 2. Docker (without sudo) 3. `jq` & `curl` installed 4. `ethereumjs-monorepo` codebase build via `npm i` (for e.g. at `/usr/app/ethereumjs`) -You may pre-download docker images for lodestar (`docker pull chainsafe/lodestar:latest`) and geth (`docker pull ethereum/client-go:v1.11.6`) to avoid any test timeout issues. +You may pre-download docker images for lodestar (`docker pull chainsafe/lodestar:latest`) and geth (`docker pull ethereum/client-go:v1.12.2`) to avoid any test timeout issues. 
Note: All commands should be run from the `client` package directory root (so something like `/usr/app/ethereumjs/packages/client`) diff --git a/packages/client/test/sim/beaconsync.spec.ts b/packages/client/test/sim/beaconsync.spec.ts index 07bde66538..d108db0b98 100644 --- a/packages/client/test/sim/beaconsync.spec.ts +++ b/packages/client/test/sim/beaconsync.spec.ts @@ -169,7 +169,7 @@ describe('simple mainnet test run', async () => { assert.fail('could not complete beacon sync in 8 minutes') } } else { - assert.fail('ethereumjs client not setup properly for snap sync') + assert.fail('ethereumjs client not setup properly for beacon sync') } }, 10 * 60_000 @@ -193,7 +193,7 @@ async function createBeaconSyncClient( datadir: any ) { // Turn on `debug` logs, defaults to all client logging - debug.enable(process.env.DEBUG_SNAP ?? '') + debug.enable(process.env.DEBUG_SYNC ?? '') const logger = getLogger({ logLevel: 'debug' }) const config = new Config({ common, diff --git a/packages/client/test/sim/simutils.ts b/packages/client/test/sim/simutils.ts index 7b229378cd..8356db1258 100644 --- a/packages/client/test/sim/simutils.ts +++ b/packages/client/test/sim/simutils.ts @@ -30,10 +30,9 @@ import type { ChildProcessWithoutNullStreams } from 'child_process' import type { Client } from 'jayson/promise' export const sleep = (ms: number) => new Promise((r) => setTimeout(r, ms)) -// This function switches between the native web implementation and a nodejs implemnetation +// This function switches between the native web implementation and a nodejs implementation export async function getEventSource(): Promise { - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - if (globalThis.EventSource) { + if (globalThis.EventSource !== undefined) { return EventSource } else { return (await import('eventsource')).default as unknown as typeof EventSource @@ -528,7 +527,7 @@ export async function setupEngineUpdateRelay(client: EthereumClient, peerBeaconU eventSource.addEventListener(topics[0], (async (_event: MessageEvent) => { if (syncState === 'PAUSED') return try { - // just fetch finalized updated, it has all relevant hashesh to fcU + // just fetch finalized updated, it has all relevant hashes for fcU const beaconFinalized = await ( await fetch(`${peerBeaconUrl}/eth/v1/beacon/light_client/finality_update`) ).json() diff --git a/packages/client/test/sim/snapsync.md b/packages/client/test/sim/snapsync.md index ea2f965935..75ca68b420 100644 --- a/packages/client/test/sim/snapsync.md +++ b/packages/client/test/sim/snapsync.md @@ -7,7 +7,7 @@ 3. `jq` & `curl` installed 4. `ethereumjs-monorepo` codebase build via `npm i` (for e.g. at `/usr/app/ethereumjs`) -You may pre-download docker images for lodestar (`docker pull chainsafe/lodestar:latest`) and geth (`docker pull ethereum/client-go:v1.11.6`) to avoid any test timeout issues. +You may pre-download docker images for lodestar (`docker pull chainsafe/lodestar:latest`) and geth (`docker pull ethereum/client-go:v1.12.2`) to avoid any test timeout issues. Note: All commands should be run from the `client` package directory root (so something like `/usr/app/ethereumjs/packages/client`)
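---

A note on the relay introduced in `setupEngineUpdateRelay` (patch 4/5): what it drives is the standard Engine API ordering — submit the block via `engine_newPayloadV2`, then advance the fork choice via `engine_forkchoiceUpdatedV2` using the finalized hash reported by the CL, tolerating `SYNCING`/`ACCEPTED` so the EL can backfill through beacon (reverse) sync before turning `VALID`. Below is a minimal, hedged sketch of just that ordering; the `EngineApi` interface and `relayHeadUpdate` helper are illustrative names only and are not part of this PR (the sim itself invokes the same engine methods in-process through `RPCManager`).

```ts
// Illustrative sketch only — `EngineApi`, `ExecutionPayloadV2` (trimmed) and
// `relayHeadUpdate` are hypothetical names; the method names and status values
// follow the public Engine API spec.
interface ExecutionPayloadV2 {
  blockHash: string
  parentHash: string
  blockNumber: string
  // ...remaining payload fields omitted for brevity
}

type PayloadStatus = 'VALID' | 'INVALID' | 'SYNCING' | 'ACCEPTED' | 'INVALID_BLOCK_HASH'

interface EngineApi {
  newPayloadV2(payload: ExecutionPayloadV2): Promise<{ status: PayloadStatus; validationError?: string }>
  forkchoiceUpdatedV2(state: {
    headBlockHash: string
    safeBlockHash: string
    finalizedBlockHash: string
  }): Promise<{ payloadStatus: { status: PayloadStatus } }>
}

// Relay one CL head update to the EL: announce the payload first, then move the
// fork choice. SYNCING/ACCEPTED are tolerated on newPayload so the EL can keep
// backfilling via the skeleton/reverse fetcher until the chain links and turns VALID.
async function relayHeadUpdate(
  engine: EngineApi,
  payload: ExecutionPayloadV2,
  finalizedBlockHash: string
): Promise<PayloadStatus> {
  const np = await engine.newPayloadV2(payload)
  if (!['VALID', 'SYNCING', 'ACCEPTED'].includes(np.status)) {
    throw new Error(`newPayload rejected: status=${np.status} validationError=${np.validationError}`)
  }
  const fcu = await engine.forkchoiceUpdatedV2({
    headBlockHash: payload.blockHash,
    safeBlockHash: finalizedBlockHash,
    finalizedBlockHash,
  })
  return fcu.payloadStatus.status
}
```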