From 8aa33ab4552bb58e2991a707c01fd55c9fd2a9de Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Mon, 25 Sep 2023 16:06:45 +0200 Subject: [PATCH 01/17] Initial work on the hardhat-zksync-node plugin --- package.json | 4 +- packages/hardhat-zksync-node/.eslintrc.js | 7 + packages/hardhat-zksync-node/.mocharc.json | 5 + packages/hardhat-zksync-node/.prettierignore | 5 + packages/hardhat-zksync-node/README.md | 3 + packages/hardhat-zksync-node/package.json | 71 ++++++++++ packages/hardhat-zksync-node/src/constants.ts | 25 ++++ .../hardhat-zksync-node/src/downloader.ts | 20 +++ packages/hardhat-zksync-node/src/errors.ts | 8 ++ packages/hardhat-zksync-node/src/index.ts | 128 ++++++++++++++++++ packages/hardhat-zksync-node/src/server.ts | 26 ++++ packages/hardhat-zksync-node/src/types.ts | 13 ++ packages/hardhat-zksync-node/src/utils.ts | 123 +++++++++++++++++ .../hardhat-zksync-node/test/.eslintrc.js | 11 ++ .../hardhat-zksync-node/test/common.config.ts | 19 +++ .../test/fixture-projects/.gitignore | 2 + .../simple/contracts/Greeter.sol | 18 +++ .../fixture-projects/simple/hardhat.config.js | 1 + packages/hardhat-zksync-node/test/helpers.ts | 24 ++++ packages/hardhat-zksync-node/test/tests.ts | 16 +++ packages/hardhat-zksync-node/tsconfig.json | 7 + yarn.lock | 9 +- 22 files changed, 542 insertions(+), 3 deletions(-) create mode 100644 packages/hardhat-zksync-node/.eslintrc.js create mode 100644 packages/hardhat-zksync-node/.mocharc.json create mode 100644 packages/hardhat-zksync-node/.prettierignore create mode 100644 packages/hardhat-zksync-node/README.md create mode 100644 packages/hardhat-zksync-node/package.json create mode 100644 packages/hardhat-zksync-node/src/constants.ts create mode 100644 packages/hardhat-zksync-node/src/downloader.ts create mode 100644 packages/hardhat-zksync-node/src/errors.ts create mode 100644 packages/hardhat-zksync-node/src/index.ts create mode 100644 packages/hardhat-zksync-node/src/server.ts create mode 100644 packages/hardhat-zksync-node/src/types.ts create mode 100644 packages/hardhat-zksync-node/src/utils.ts create mode 100644 packages/hardhat-zksync-node/test/.eslintrc.js create mode 100644 packages/hardhat-zksync-node/test/common.config.ts create mode 100644 packages/hardhat-zksync-node/test/fixture-projects/.gitignore create mode 100644 packages/hardhat-zksync-node/test/fixture-projects/simple/contracts/Greeter.sol create mode 100644 packages/hardhat-zksync-node/test/fixture-projects/simple/hardhat.config.js create mode 100644 packages/hardhat-zksync-node/test/helpers.ts create mode 100644 packages/hardhat-zksync-node/test/tests.ts create mode 100644 packages/hardhat-zksync-node/tsconfig.json diff --git a/package.json b/package.json index e988aad7b..420f4023e 100644 --- a/package.json +++ b/package.json @@ -18,8 +18,8 @@ "wsrun": "^5.2.2" }, "scripts": { - "build": "tsc --build packages/hardhat-zksync-solc packages/hardhat-zksync-deploy packages/hardhat-zksync-vyper packages/hardhat-zksync-chai-matchers packages/hardhat-zksync-verify packages/hardhat-zksync-toolbox packages/hardhat-zksync-upgradable packages/hardhat-zksync-verify-vyper", - "watch": "tsc --build --watch packages/hardhat-zksync-solc packages/hardhat-zksync-deploy packages/hardhat-zksync-vyper packages/hardhat-zksync-chai-matchers packages/hardhat-zksync-verify packages/hardhat-zksync-toolbox packages/hardhat-zksync-upgradable packages/hardhat-zksync-verify-vyper", + "build": "tsc --build packages/hardhat-zksync-solc packages/hardhat-zksync-deploy packages/hardhat-zksync-vyper 
packages/hardhat-zksync-chai-matchers packages/hardhat-zksync-verify packages/hardhat-zksync-toolbox packages/hardhat-zksync-upgradable packages/hardhat-zksync-verify-vyper packages/hardhat-zksync-node", + "watch": "tsc --build --watch packages/hardhat-zksync-solc packages/hardhat-zksync-deploy packages/hardhat-zksync-vyper packages/hardhat-zksync-chai-matchers packages/hardhat-zksync-verify packages/hardhat-zksync-toolbox packages/hardhat-zksync-upgradable packages/hardhat-zksync-verify-vyper packages/hardhat-zksync-node", "clean": "wsrun --exclude-missing clean", "lint": "wsrun --exclude-missing --stages lint", "lint:fix": "wsrun --exclude-missing --stages lint:fix", diff --git a/packages/hardhat-zksync-node/.eslintrc.js b/packages/hardhat-zksync-node/.eslintrc.js new file mode 100644 index 000000000..889740f22 --- /dev/null +++ b/packages/hardhat-zksync-node/.eslintrc.js @@ -0,0 +1,7 @@ +module.exports = { + extends: [`${__dirname}/../../config/eslint/eslintrc.js`], + parserOptions: { + project: `${__dirname}/tsconfig.json`, + sourceType: "module", + }, +}; diff --git a/packages/hardhat-zksync-node/.mocharc.json b/packages/hardhat-zksync-node/.mocharc.json new file mode 100644 index 000000000..d00ceb413 --- /dev/null +++ b/packages/hardhat-zksync-node/.mocharc.json @@ -0,0 +1,5 @@ +{ + "require": "ts-node/register/files", + "ignore": ["test/fixture-projects/**/*"], + "timeout": 10000 +} diff --git a/packages/hardhat-zksync-node/.prettierignore b/packages/hardhat-zksync-node/.prettierignore new file mode 100644 index 000000000..37cbd4e3f --- /dev/null +++ b/packages/hardhat-zksync-node/.prettierignore @@ -0,0 +1,5 @@ +/node_modules +/dist +/test/fixture-projects/**/artifacts +/test/fixture-projects/**/cache +CHANGELOG.md diff --git a/packages/hardhat-zksync-node/README.md b/packages/hardhat-zksync-node/README.md new file mode 100644 index 000000000..4b30a29c2 --- /dev/null +++ b/packages/hardhat-zksync-node/README.md @@ -0,0 +1,3 @@ +# hardhat-zksync-node + +[Hardhat](https://hardhat.org/) plugin to run the zkSync node locally. 
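As a minimal usage sketch (not part of the patch itself), the `node-zksync` task and the option values it accepts — defined in `src/constants.ts` and `src/index.ts` later in this patch — could be invoked programmatically from a Hardhat script roughly as follows; the script shape and the particular argument choices here are illustrative assumptions:

```typescript
// Illustrative sketch only: starts the local era-test-node JSON-RPC server
// through the task registered by this plugin. The task name and option values
// ('node-zksync', log levels error/warn/info/debug, fork targets testnet/mainnet)
// come from src/constants.ts and src/index.ts added below.
import hre from 'hardhat';

async function main() {
    await hre.run('node-zksync', { log: 'debug', fork: 'testnet' });
}

main().catch((error) => {
    console.error(error);
    process.exit(1);
});
```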
diff --git a/packages/hardhat-zksync-node/package.json b/packages/hardhat-zksync-node/package.json new file mode 100644 index 000000000..a475f673f --- /dev/null +++ b/packages/hardhat-zksync-node/package.json @@ -0,0 +1,71 @@ +{ + "name": "@matterlabs/hardhat-zksync-node", + "version": "0.0.1", + "description": "Hardhat plugin to run zkSync node locally", + "repository": "github:matter-labs/hardhat-zksync", + "homepage": "https://github.com/matter-labs/hardhat-zksync/tree/main/packages/hardhat-zksync-node", + "author": "Matter Labs", + "license": "MIT", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "keywords": [ + "ethereum", + "smart-contracts", + "hardhat", + "hardhat-plugin", + "zkSync" + ], + "scripts": { + "lint": "yarn prettier --check && yarn eslint", + "lint:fix": "yarn eslint --fix", + "fmt": "yarn prettier --write", + "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", + "prettier": "prettier 'src/**/*.ts' 'test/**/*.ts'", + "test": "mocha test/tests.ts --exit", + "build": "tsc --build .", + "clean": "rimraf dist" + }, + "files": [ + "dist/", + "src/", + "LICENSE", + "README.md" + ], + "dependencies": { + "@matterlabs/hardhat-zksync-solc": "0.4.2", + "chalk": "4.1.2", + "ts-morph": "^19.0.0" + }, + "devDependencies": { + "@types/chai": "^4.2.0", + "@types/mocha": "^9.1.0", + "@types/node": "^18.11.17", + "@typescript-eslint/eslint-plugin": "5.13.0", + "@typescript-eslint/parser": "5.13.0", + "chai": "^4.3.6", + "eslint": "^8.10.0", + "eslint-config-prettier": "8.4.0", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-prettier": "4.0.0", + "ethers": "~5.7.2", + "hardhat": "^2.14.0", + "mocha": "^9.2.1", + "prettier": "2.5.1", + "rimraf": "^3.0.2", + "ts-node": "^10.6.0", + "typescript": "^4.6.2", + "zksync-web3": "^0.14.3" + }, + "peerDependencies": { + "ethers": "~5.7.2", + "hardhat": "^2.14.0", + "zksync-web3": "^0.14.3" + }, + "prettier": { + "tabWidth": 4, + "printWidth": 120, + "parser": "typescript", + "singleQuote": true, + "bracketSpacing": true + } +} diff --git a/packages/hardhat-zksync-node/src/constants.ts b/packages/hardhat-zksync-node/src/constants.ts new file mode 100644 index 000000000..7fc917c30 --- /dev/null +++ b/packages/hardhat-zksync-node/src/constants.ts @@ -0,0 +1,25 @@ +export const PLUGIN_NAME = '@matterlabs/hardhat-zksync-node'; + +export const TASK_NODE_ZKSYNC = 'node-zksync'; +export const TASK_NODE_ZKSYNC_CREATE_SERVER = 'node-zksync:create-server'; + +export const PROCESS_TERMINATION_SIGNALS = ['SIGINT', 'SIGTERM']; + +export const ALLOWED_LOG_VALUES = ['error', 'warn', 'info', 'debug']; +export const ALLOWED_CACHE_VALUES = ['none', 'disk', 'memory']; +export const ALLOWED_FORK_VALUES = ['testnet', 'mainnet']; +export const ALLOWED_SHOW_STORAGE_LOGS_VALUES = ['none', 'read', 'write', 'all']; +export const ALLOWED_SHOW_VM_DETAILS_VALUES = ['none', 'all']; +export const ALLOWED_SHOW_GAS_DETAILS_VALUES = ['none', 'all']; + +export const PLATFORM_MAP: Record = { + darwin: 'macosx', + linux: 'linux', + win32: 'windows', +}; + +export const TOOLCHAIN_MAP: Record = { + linux: '-musl', + win32: '-gnu', + darwin: '', +}; diff --git a/packages/hardhat-zksync-node/src/downloader.ts b/packages/hardhat-zksync-node/src/downloader.ts new file mode 100644 index 000000000..2b056f48b --- /dev/null +++ b/packages/hardhat-zksync-node/src/downloader.ts @@ -0,0 +1,20 @@ +export class RPCServerBinaryDownloader { + private readonly _binaryPath: string; + + constructor(binaryPath: string) { + this._binaryPath = binaryPath; + } + + public async 
isBinaryDownloaded(): Promise { + // Check if the binary file exists at the _binaryPath location. + return false; + } + + public async download(): Promise { + // Download the binary file to the _binaryPath location. + } + + public get binaryPath(): string { + return this._binaryPath; + } +} \ No newline at end of file diff --git a/packages/hardhat-zksync-node/src/errors.ts b/packages/hardhat-zksync-node/src/errors.ts new file mode 100644 index 000000000..1843f3751 --- /dev/null +++ b/packages/hardhat-zksync-node/src/errors.ts @@ -0,0 +1,8 @@ +import { HardhatPluginError } from 'hardhat/plugins'; +import { PLUGIN_NAME } from './constants'; + +export class ZkSyncNodePluginError extends HardhatPluginError { + constructor(message: string, parentError?: Error) { + super(PLUGIN_NAME, message, parentError); + } +} diff --git a/packages/hardhat-zksync-node/src/index.ts b/packages/hardhat-zksync-node/src/index.ts new file mode 100644 index 000000000..7854f1c4c --- /dev/null +++ b/packages/hardhat-zksync-node/src/index.ts @@ -0,0 +1,128 @@ +import { task, subtask, types } from "hardhat/config"; + +import { TASK_NODE_ZKSYNC, TASK_NODE_ZKSYNC_CREATE_SERVER } from "./constants"; +import { JsonRpcServer } from "./server"; +import { constructCommandArgs, getRPCServerBinariesDir } from "./utils"; +import { RPCServerBinaryDownloader } from "./downloader"; + +//TODO: Add more tasks + +task(TASK_NODE_ZKSYNC, "Starts a JSON-RPC server for zkSync node") + .addOptionalParam( + "log", + "Logging level (error, warn, info, debug)", + undefined, + types.string + ) + .addOptionalParam( + "logFilePath", + "Path to the file where logs should be written", + undefined, + types.string + ) + .addOptionalParam( + "cache", + "Cache network request (none, disk, memory)", + undefined, + types.string + ) + .addOptionalParam( + "cacheDir", + "Path to the directory where cache should be stored", + undefined, + types.string + ) + .addFlag( + "resetCache", + "Reset cache before start", + ) + .addOptionalParam( + "fork", + "Fork from the specified network (testnet, mainnet)", + undefined, + types.string + ) + .addOptionalParam( + "showSorageLogs", + "Show storage logs (none, read, write, all)", + undefined, + types.string + ) + .addOptionalParam( + "showVmDetails", + "Show VM details (none, all)", + undefined, + types.string + ) + .addOptionalParam( + "showGasDetails", + "Show gas details (none, all)", + undefined, + types.string + ) + .addFlag( + "showCalls", + "Print more detailed call traces" + ) + .addFlag( + "resolveHashes", + "Ask openchain for ABI names" + ) + .setAction( + async ( + { + log, + logFilePath, + cache, + cacheDir, + resetCache, + fork, + showStorageLogs, + showVmDetails, + showGasDetails, + showCalls, + resolveHashes, + }: { + log: string; + logFilePath: string; + cache: string; + cacheDir: string; + resetCache: boolean; + fork: string; + showStorageLogs: string; + showVmDetails: string; + showGasDetails: string; + showCalls: boolean; + resolveHashes: boolean; + }, + hre + ) => { + const commandArgs = constructCommandArgs({ + log, + logFilePath, + cache, + cacheDir, + resetCache, + fork, + showStorageLogs, + showVmDetails, + showGasDetails, + showCalls, + resolveHashes, + }); + + const rpcServerBinariyPath = await getRPCServerBinariesDir(); + const downloader: RPCServerBinaryDownloader = new RPCServerBinaryDownloader(rpcServerBinariyPath); + console.log(downloader.binaryPath); + + //TODO: Change this to the path of the binary + const binaryPath = 
"/Users/milivojepopovac/TxFusion/Templates/era-test-node/target/debug/era_test_node"; + const server: JsonRpcServer = new JsonRpcServer(binaryPath); + + try { + server.listen(commandArgs); // Add any arguments if needed + } catch (error: any) { + throw error; + } + } + ); diff --git a/packages/hardhat-zksync-node/src/server.ts b/packages/hardhat-zksync-node/src/server.ts new file mode 100644 index 000000000..c29f56585 --- /dev/null +++ b/packages/hardhat-zksync-node/src/server.ts @@ -0,0 +1,26 @@ +import { execSync } from 'child_process'; + +import chalk from 'chalk'; + +import { PROCESS_TERMINATION_SIGNALS } from './constants'; + +export class JsonRpcServer { + private readonly binaryPath: string; + + constructor(binaryPath: string) { + this.binaryPath = binaryPath; + } + + public listen(args: string[] = []): void { + const command = `${this.binaryPath} ${args.join(' ')}`; + try { + execSync(command, { stdio: 'inherit' }); + } catch (error: any) { + if (PROCESS_TERMINATION_SIGNALS.includes(error.signal)) { + console.info(chalk.yellow(`Received ${error.signal}. Exiting.`)); + return; + } + throw error; + } + } +} diff --git a/packages/hardhat-zksync-node/src/types.ts b/packages/hardhat-zksync-node/src/types.ts new file mode 100644 index 000000000..c618d6cc6 --- /dev/null +++ b/packages/hardhat-zksync-node/src/types.ts @@ -0,0 +1,13 @@ +export interface CommandArguments { + log?: string; + logFilePath?: string; + cache?: string; + cacheDir?: string; + resetCache?: boolean; + fork?: string; + showStorageLogs?: string; + showVmDetails?: string; + showGasDetails?: string; + showCalls?: boolean; + resolveHashes?: boolean; +} \ No newline at end of file diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts new file mode 100644 index 000000000..443042c0a --- /dev/null +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -0,0 +1,123 @@ +import path from 'path'; + +import { + ALLOWED_CACHE_VALUES, + ALLOWED_FORK_VALUES, + ALLOWED_LOG_VALUES, + ALLOWED_SHOW_GAS_DETAILS_VALUES, + ALLOWED_SHOW_STORAGE_LOGS_VALUES, + ALLOWED_SHOW_VM_DETAILS_VALUES, + PLATFORM_MAP, + TOOLCHAIN_MAP +} from "./constants"; +import { ZkSyncNodePluginError } from "./errors"; +import { CommandArguments } from "./types"; + +import { getCompilersDir } from 'hardhat/internal/util/global-dir'; + +export function constructCommandArgs(args: CommandArguments): string[] { + const commandArgs: string[] = []; + + if (args.log) { + if (!ALLOWED_LOG_VALUES.includes(args.log)) { + throw new ZkSyncNodePluginError(`Invalid log value: ${args.log}`); + } + commandArgs.push(`--log=${args.log}`); + } + + if (args.logFilePath) { + commandArgs.push(`--log-file-path=${args.logFilePath}`); + } + + if (args.cache) { + if (!ALLOWED_CACHE_VALUES.includes(args.cache)) { + throw new ZkSyncNodePluginError(`Invalid cache value: ${args.cache}`); + } + commandArgs.push(`--cache=${args.cache}`); + } + + if (args.cacheDir) { + commandArgs.push(`--cache-dir=${args.cacheDir}`); + } + + if (args.resetCache) { + commandArgs.push(`--reset-cache`); + } + + if (args.fork) { + if (!ALLOWED_FORK_VALUES.includes(args.fork)) { + throw new ZkSyncNodePluginError(`Invalid fork value: ${args.fork}`); + } + commandArgs.push(`--fork=${args.fork}`); + } + + if (args.showStorageLogs) { + if (!ALLOWED_SHOW_STORAGE_LOGS_VALUES.includes(args.showStorageLogs)) { + throw new ZkSyncNodePluginError(`Invalid showStorageLogs value: ${args.showStorageLogs}`); + } + commandArgs.push(`--show-storage-logs=${args.showStorageLogs}`); + } + + if 
(args.showVmDetails) { + if (!ALLOWED_SHOW_VM_DETAILS_VALUES.includes(args.showVmDetails)) { + throw new ZkSyncNodePluginError(`Invalid showVmDetails value: ${args.showVmDetails}`); + } + commandArgs.push(`--show-vm-details=${args.showVmDetails}`); + } + + if (args.showGasDetails) { + if (!ALLOWED_SHOW_GAS_DETAILS_VALUES.includes(args.showGasDetails)) { + throw new ZkSyncNodePluginError(`Invalid showGasDetails value: ${args.showGasDetails}`); + } + commandArgs.push(`--show-gas-details=${args.showGasDetails}`); + } + + if (args.showCalls) { + commandArgs.push(`--show-calls`); + } + + if (args.resolveHashes) { + commandArgs.push(`--resolve-hashes`); + } + + commandArgs.push('run'); + + return commandArgs; +} + +function getPlatform() { + return PLATFORM_MAP[process.platform]; +} + +function getToolchain() { + return TOOLCHAIN_MAP[process.platform]; +} + +function getArch() { + return process.arch === 'x64' ? 'amd64' : process.arch; +} + +function getExtension() { + return process.platform === 'win32' ? '.exe' : ''; +} + +// TODO: This will be (probably) changed once there are binaries stored on GitHub. And than we can use it. +export function getRPCServerBinaryURL(repo: string, version: string, isRelease: boolean = true): string { + const platform = getPlatform(); + const toolchain = getToolchain(); + const arch = getArch(); + const ext = getExtension(); + + const commonPath = `era-test-node-${platform}-${arch}${toolchain}-v${version}${ext}`; + return isRelease + ? `${repo}/releases/download/v${version}/${commonPath}` + : `${repo}/raw/main/${platform}-${arch}/${commonPath}`; +} + +export async function getRPCServerBinariesDir(): Promise { + const compilersCachePath = await getCompilersDir(); + const basePath = path.dirname(compilersCachePath); + const rpcServerBinariesPath = path.join(basePath, 'zksync-memory-node'); + + return rpcServerBinariesPath; +} diff --git a/packages/hardhat-zksync-node/test/.eslintrc.js b/packages/hardhat-zksync-node/test/.eslintrc.js new file mode 100644 index 000000000..12ee5882f --- /dev/null +++ b/packages/hardhat-zksync-node/test/.eslintrc.js @@ -0,0 +1,11 @@ +module.exports = { + extends: [`${__dirname}/../.eslintrc.js`], + rules: { + "import/no-extraneous-dependencies": [ + "error", + { + devDependencies: true, + }, + ], + }, +}; diff --git a/packages/hardhat-zksync-node/test/common.config.ts b/packages/hardhat-zksync-node/test/common.config.ts new file mode 100644 index 000000000..b3e43fb37 --- /dev/null +++ b/packages/hardhat-zksync-node/test/common.config.ts @@ -0,0 +1,19 @@ +import '../../hardhat-zksync-solc/src/index'; +import '../src/index'; +import { HardhatUserConfig } from 'hardhat/config'; + +const config: HardhatUserConfig = { + zksolc: { + compilerSource: 'binary', + }, + networks: { + hardhat: { + zksync: true, + }, + }, + solidity: { + version: process.env.SOLC_VERSION || '0.8.17', + }, +}; + +export default config; diff --git a/packages/hardhat-zksync-node/test/fixture-projects/.gitignore b/packages/hardhat-zksync-node/test/fixture-projects/.gitignore new file mode 100644 index 000000000..1a65e437a --- /dev/null +++ b/packages/hardhat-zksync-node/test/fixture-projects/.gitignore @@ -0,0 +1,2 @@ +/*/cache +/*/artifacts diff --git a/packages/hardhat-zksync-node/test/fixture-projects/simple/contracts/Greeter.sol b/packages/hardhat-zksync-node/test/fixture-projects/simple/contracts/Greeter.sol new file mode 100644 index 000000000..8045d4954 --- /dev/null +++ b/packages/hardhat-zksync-node/test/fixture-projects/simple/contracts/Greeter.sol @@ -0,0 
+1,18 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.4.22 <0.9.0; + +contract Greeter { + + string greeting; + string bad; + constructor(string memory _greeting) { + greeting = _greeting; + bad = "baaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaad"; + } + + function greet() public view returns (string memory) { + return greeting; + } + +} diff --git a/packages/hardhat-zksync-node/test/fixture-projects/simple/hardhat.config.js b/packages/hardhat-zksync-node/test/fixture-projects/simple/hardhat.config.js new file mode 100644 index 000000000..12575417f --- /dev/null +++ b/packages/hardhat-zksync-node/test/fixture-projects/simple/hardhat.config.js @@ -0,0 +1 @@ +module.exports = require('../../common.config'); diff --git a/packages/hardhat-zksync-node/test/helpers.ts b/packages/hardhat-zksync-node/test/helpers.ts new file mode 100644 index 000000000..9a67c0faf --- /dev/null +++ b/packages/hardhat-zksync-node/test/helpers.ts @@ -0,0 +1,24 @@ +import { resetHardhatContext } from 'hardhat/plugins-testing'; +import { HardhatRuntimeEnvironment } from 'hardhat/types'; +import { TASK_CLEAN } from 'hardhat/builtin-tasks/task-names'; +import path from 'path'; + +declare module 'mocha' { + interface Context { + env: HardhatRuntimeEnvironment; + } +} + +export function useEnvironment(fixtureProjectName: string, networkName = 'hardhat') { + beforeEach('Loading hardhat environment', function () { + process.chdir(path.join(__dirname, 'fixture-projects', fixtureProjectName)); + process.env.HARDHAT_NETWORK = networkName; + + this.env = require('hardhat'); + // this.env.run(TASK_CLEAN); + }); + + afterEach('Resetting hardhat', function () { + resetHardhatContext(); + }); +} diff --git a/packages/hardhat-zksync-node/test/tests.ts b/packages/hardhat-zksync-node/test/tests.ts new file mode 100644 index 000000000..f7402cb3b --- /dev/null +++ b/packages/hardhat-zksync-node/test/tests.ts @@ -0,0 +1,16 @@ +import { assert } from 'chai'; +import chalk from 'chalk'; + +import { TASK_NODE_ZKSYNC } from '../src/constants'; + +import { useEnvironment } from './helpers'; + +describe('zksolc plugin', async function () { + describe('Simple', async function () { + useEnvironment('simple'); + + it('Should successfully compile a simple contract', async function () { + await this.env.run(TASK_NODE_ZKSYNC); + }); + }); +}); \ No newline at end of file diff --git a/packages/hardhat-zksync-node/tsconfig.json b/packages/hardhat-zksync-node/tsconfig.json new file mode 100644 index 000000000..49580db2e --- /dev/null +++ b/packages/hardhat-zksync-node/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../config/typescript/tsconfig.json", + "compilerOptions": { + "outDir": "./dist" + }, + "exclude": ["./dist", "./node_modules", "./test"] +} diff --git a/yarn.lock b/yarn.lock index 77ea1fc33..28d1da6f1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -715,6 +715,13 @@ chalk "4.1.2" ts-morph "^19.0.0" +"@matterlabs/hardhat-zksync-node@link:packages/hardhat-zksync-node": + version "0.0.1" + dependencies: + "@matterlabs/hardhat-zksync-solc" "0.4.2" + chalk "4.1.2" + ts-morph "^19.0.0" + "@matterlabs/hardhat-zksync-solc@0.4.1": version "0.4.1" resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.4.1.tgz#e8e67d947098d7bb8925f968544d34e522af5a9c" @@ -746,7 +753,7 @@ dockerode "^3.3.4" "@matterlabs/hardhat-zksync-vyper@link:packages/hardhat-zksync-vyper": - version "0.2.1" + version "0.2.2" dependencies: 
"@nomiclabs/hardhat-docker" "^2.0.0" chalk "4.1.2" From 9e65d78939a83fb2ec8ce798c8519e57f6d7ed83 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Fri, 29 Sep 2023 14:20:13 +0200 Subject: [PATCH 02/17] Add more subtasks to hardhat-zksync-node plugin --- packages/hardhat-zksync-node/package.json | 3 + packages/hardhat-zksync-node/src/constants.ts | 20 +- .../hardhat-zksync-node/src/downloader.ts | 40 +++- packages/hardhat-zksync-node/src/index.ts | 89 ++++++-- packages/hardhat-zksync-node/src/server.ts | 14 +- packages/hardhat-zksync-node/src/utils.ts | 194 +++++++++++++++--- yarn.lock | 2 + 7 files changed, 292 insertions(+), 70 deletions(-) diff --git a/packages/hardhat-zksync-node/package.json b/packages/hardhat-zksync-node/package.json index a475f673f..73dd765ce 100644 --- a/packages/hardhat-zksync-node/package.json +++ b/packages/hardhat-zksync-node/package.json @@ -33,11 +33,14 @@ ], "dependencies": { "@matterlabs/hardhat-zksync-solc": "0.4.2", + "axios": "^1.4.0", "chalk": "4.1.2", + "fs-extra": "^11.1.1", "ts-morph": "^19.0.0" }, "devDependencies": { "@types/chai": "^4.2.0", + "@types/fs-extra": "^5.1.0", "@types/mocha": "^9.1.0", "@types/node": "^18.11.17", "@typescript-eslint/eslint-plugin": "5.13.0", diff --git a/packages/hardhat-zksync-node/src/constants.ts b/packages/hardhat-zksync-node/src/constants.ts index 7fc917c30..1a7b89eaf 100644 --- a/packages/hardhat-zksync-node/src/constants.ts +++ b/packages/hardhat-zksync-node/src/constants.ts @@ -1,7 +1,11 @@ export const PLUGIN_NAME = '@matterlabs/hardhat-zksync-node'; +export const ZKNODE_BIN_OWNER = 'matter-labs'; +export const ZKNODE_BIN_REPOSITORY_NAME = 'era-test-node'; + export const TASK_NODE_ZKSYNC = 'node-zksync'; export const TASK_NODE_ZKSYNC_CREATE_SERVER = 'node-zksync:create-server'; +export const TASK_NODE_ZKSYNC_DOWNLOAD_BINARY = 'node-zksync:download-binary'; export const PROCESS_TERMINATION_SIGNALS = ['SIGINT', 'SIGTERM']; @@ -13,13 +17,15 @@ export const ALLOWED_SHOW_VM_DETAILS_VALUES = ['none', 'all']; export const ALLOWED_SHOW_GAS_DETAILS_VALUES = ['none', 'all']; export const PLATFORM_MAP: Record = { - darwin: 'macosx', - linux: 'linux', + darwin: 'apple-darwin', + linux: 'unknown-linux-gnu', win32: 'windows', }; -export const TOOLCHAIN_MAP: Record = { - linux: '-musl', - win32: '-gnu', - darwin: '', -}; +export const TEMP_FILE_PREFIX = "tmp-"; + +// export const TOOLCHAIN_MAP: Record = { +// linux: '-musl', +// win32: '-gnu', +// darwin: '', +// }; diff --git a/packages/hardhat-zksync-node/src/downloader.ts b/packages/hardhat-zksync-node/src/downloader.ts index 2b056f48b..f6e912d78 100644 --- a/packages/hardhat-zksync-node/src/downloader.ts +++ b/packages/hardhat-zksync-node/src/downloader.ts @@ -1,20 +1,38 @@ -export class RPCServerBinaryDownloader { - private readonly _binaryPath: string; +import path from "path"; +import fs from "fs"; +import fse from 'fs-extra'; +import { download } from "./utils"; +import { ZkSyncNodePluginError } from "./errors"; +import { PLUGIN_NAME } from "./constants"; - constructor(binaryPath: string) { - this._binaryPath = binaryPath; +export class RPCServerDownloader { + private readonly _binaryDir: string; + private readonly _version: string; + + constructor(binaryDir: string, version: string) { + this._binaryDir = binaryDir; + this._version = version; + } + + public async isDownloaded(): Promise { + return fs.existsSync(this.getBinaryPath()); } - public async isBinaryDownloaded(): Promise { - // Check if the binary file exists at the _binaryPath location. 
- return false; + public async download(url: string): Promise { + try { + await download(url, this.getBinaryPath(), PLUGIN_NAME, this._version, 30000); + await this._postProcessDownload(); + } catch (error: any) { + throw new ZkSyncNodePluginError(`Error downloading binary from URL ${url}: ${error.message}`); + } } - public async download(): Promise { - // Download the binary file to the _binaryPath location. + public getBinaryPath(): string { + return path.join(this._binaryDir, this._version); } - public get binaryPath(): string { - return this._binaryPath; + private async _postProcessDownload(): Promise { + const binaryPath = this.getBinaryPath(); + fse.chmodSync(binaryPath, 0o755); } } \ No newline at end of file diff --git a/packages/hardhat-zksync-node/src/index.ts b/packages/hardhat-zksync-node/src/index.ts index 7854f1c4c..b7f4e18bf 100644 --- a/packages/hardhat-zksync-node/src/index.ts +++ b/packages/hardhat-zksync-node/src/index.ts @@ -1,12 +1,71 @@ import { task, subtask, types } from "hardhat/config"; -import { TASK_NODE_ZKSYNC, TASK_NODE_ZKSYNC_CREATE_SERVER } from "./constants"; +import { PLUGIN_NAME, TASK_NODE_ZKSYNC, TASK_NODE_ZKSYNC_CREATE_SERVER, TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, ZKNODE_BIN_OWNER, ZKNODE_BIN_REPOSITORY_NAME } from "./constants"; import { JsonRpcServer } from "./server"; -import { constructCommandArgs, getRPCServerBinariesDir } from "./utils"; -import { RPCServerBinaryDownloader } from "./downloader"; +import { constructCommandArgs, getAssetToDownload, getLatestRelease, getRPCServerBinariesDir } from "./utils"; +import { RPCServerDownloader } from "./downloader"; +import { ZkSyncNodePluginError } from "./errors"; -//TODO: Add more tasks +// Subtask to download the binary +subtask(TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, "Downloads the JSON-RPC server binary") + .addFlag( + "force", + "Force download even if the binary already exists" + ) + .setAction( + async ( + { + force, + }: { + force: boolean; + }, + hre + ) => { + // Directory where the binaries are stored + const rpcServerBinaryDir = await getRPCServerBinariesDir(); + + // Get the latest release of the binary + const latestRelease = await getLatestRelease(ZKNODE_BIN_OWNER, ZKNODE_BIN_REPOSITORY_NAME, PLUGIN_NAME); + const downloader: RPCServerDownloader = new RPCServerDownloader(rpcServerBinaryDir, latestRelease.tag_name); + // Check if the binary is already downloaded + if (!force && await downloader.isDownloaded()) { + return downloader.getBinaryPath(); + } + + // Download the binary + const assetToDownload: any = await getAssetToDownload(latestRelease); + await downloader.download(assetToDownload.browser_download_url); + + return downloader.getBinaryPath(); + } + ); + +// Subtask to create the server +subtask(TASK_NODE_ZKSYNC_CREATE_SERVER, "Creates a JSON-RPC server for zkSync node") + .addParam( + "binaryPath", + "Path to the binary file", + undefined, + types.string + ) + .setAction( + async ( + { + binaryPath, + }: { + binaryPath: string; + }, + hre + ) => { + // Create the server + const server: JsonRpcServer = new JsonRpcServer(binaryPath); + + return server; + } + ); + +// Main task of the plugin. It starts the server and listens for requests. 
task(TASK_NODE_ZKSYNC, "Starts a JSON-RPC server for zkSync node") .addOptionalParam( "log", @@ -68,6 +127,10 @@ task(TASK_NODE_ZKSYNC, "Starts a JSON-RPC server for zkSync node") "resolveHashes", "Ask openchain for ABI names" ) + .addFlag( + "force", + "Force download even if the binary already exists" + ) .setAction( async ( { @@ -95,7 +158,9 @@ task(TASK_NODE_ZKSYNC, "Starts a JSON-RPC server for zkSync node") showCalls: boolean; resolveHashes: boolean; }, - hre + { + run, + } ) => { const commandArgs = constructCommandArgs({ log, @@ -111,18 +176,16 @@ task(TASK_NODE_ZKSYNC, "Starts a JSON-RPC server for zkSync node") resolveHashes, }); - const rpcServerBinariyPath = await getRPCServerBinariesDir(); - const downloader: RPCServerBinaryDownloader = new RPCServerBinaryDownloader(rpcServerBinariyPath); - console.log(downloader.binaryPath); + // Download the binary + const binaryPath: string = await run(TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, { force: false }); - //TODO: Change this to the path of the binary - const binaryPath = "/Users/milivojepopovac/TxFusion/Templates/era-test-node/target/debug/era_test_node"; - const server: JsonRpcServer = new JsonRpcServer(binaryPath); + // Create the server + const server: JsonRpcServer = await run(TASK_NODE_ZKSYNC_CREATE_SERVER, { binaryPath }); try { - server.listen(commandArgs); // Add any arguments if needed + server.listen(commandArgs); } catch (error: any) { - throw error; + throw new ZkSyncNodePluginError(error.message); } } ); diff --git a/packages/hardhat-zksync-node/src/server.ts b/packages/hardhat-zksync-node/src/server.ts index c29f56585..8cd373496 100644 --- a/packages/hardhat-zksync-node/src/server.ts +++ b/packages/hardhat-zksync-node/src/server.ts @@ -1,26 +1,26 @@ import { execSync } from 'child_process'; - import chalk from 'chalk'; import { PROCESS_TERMINATION_SIGNALS } from './constants'; export class JsonRpcServer { - private readonly binaryPath: string; + private readonly _serverBinaryPath: string; - constructor(binaryPath: string) { - this.binaryPath = binaryPath; + constructor(serverBinaryPath: string) { + this._serverBinaryPath = serverBinaryPath; } public listen(args: string[] = []): void { - const command = `${this.binaryPath} ${args.join(' ')}`; + const command = `${this._serverBinaryPath} ${args.join(' ')}`; try { + console.info(chalk.green(`Starting the JSON-RPC server with command: ${command}`)); execSync(command, { stdio: 'inherit' }); } catch (error: any) { if (PROCESS_TERMINATION_SIGNALS.includes(error.signal)) { - console.info(chalk.yellow(`Received ${error.signal}. Exiting.`)); + console.info(chalk.yellow(`Received ${error.signal} signal. 
The server process has exited.`)); return; } - throw error; + throw new Error(`The server process has exited with an error: ${error.message}`); } } } diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index 443042c0a..e28627754 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -1,20 +1,27 @@ import path from 'path'; - -import { - ALLOWED_CACHE_VALUES, - ALLOWED_FORK_VALUES, - ALLOWED_LOG_VALUES, - ALLOWED_SHOW_GAS_DETAILS_VALUES, - ALLOWED_SHOW_STORAGE_LOGS_VALUES, - ALLOWED_SHOW_VM_DETAILS_VALUES, +import axios from 'axios'; +import util from "util"; +import fs from "fs"; +import fse from 'fs-extra'; +import { exec } from 'child_process'; +import type { Dispatcher } from "undici"; + +import { + ALLOWED_CACHE_VALUES, + ALLOWED_FORK_VALUES, + ALLOWED_LOG_VALUES, + ALLOWED_SHOW_GAS_DETAILS_VALUES, + ALLOWED_SHOW_STORAGE_LOGS_VALUES, + ALLOWED_SHOW_VM_DETAILS_VALUES, PLATFORM_MAP, - TOOLCHAIN_MAP + TEMP_FILE_PREFIX } from "./constants"; import { ZkSyncNodePluginError } from "./errors"; import { CommandArguments } from "./types"; import { getCompilersDir } from 'hardhat/internal/util/global-dir'; +// Generates command arguments for running the era-test-node binary export function constructCommandArgs(args: CommandArguments): string[] { const commandArgs: string[] = []; @@ -86,34 +93,15 @@ export function constructCommandArgs(args: CommandArguments): string[] { } function getPlatform() { - return PLATFORM_MAP[process.platform]; -} - -function getToolchain() { - return TOOLCHAIN_MAP[process.platform]; + return PLATFORM_MAP[process.platform] || ''; } function getArch() { - return process.arch === 'x64' ? 'amd64' : process.arch; -} - -function getExtension() { - return process.platform === 'win32' ? '.exe' : ''; -} - -// TODO: This will be (probably) changed once there are binaries stored on GitHub. And than we can use it. -export function getRPCServerBinaryURL(repo: string, version: string, isRelease: boolean = true): string { - const platform = getPlatform(); - const toolchain = getToolchain(); - const arch = getArch(); - const ext = getExtension(); - - const commonPath = `era-test-node-${platform}-${arch}${toolchain}-v${version}${ext}`; - return isRelease - ? `${repo}/releases/download/v${version}/${commonPath}` - : `${repo}/raw/main/${platform}-${arch}/${commonPath}`; + const arch = process.arch === 'x64' ? 'x86_64' : process.arch; + return process.arch === 'arm64' ? 'aarch64' : arch; } +// Returns the path to the directory where the era-test-node binary is/will be located export async function getRPCServerBinariesDir(): Promise { const compilersCachePath = await getCompilersDir(); const basePath = path.dirname(compilersCachePath); @@ -121,3 +109,145 @@ export async function getRPCServerBinariesDir(): Promise { return rpcServerBinariesPath; } + +// Get latest release from GitHub of the era-test-node binary +export async function getLatestRelease(owner: string, repo: string, userAgent: string): Promise { + const url = `https://api.github.com/repos/${owner}/${repo}/releases/latest`; + + try { + const response = await axios.get(url, { + headers: { + 'User-Agent': userAgent, + } + }); + + return response.data; + } catch (error: any) { + if (error.response) { + // The request was made and the server responded with a status code outside of the range of 2xx + throw new ZkSyncNodePluginError( + `Failed to get latest release for ${owner}/${repo}. 
Status: ${error.response.status}, Data: ${JSON.stringify(error.response.data)}` + ); + } else if (error.request) { + // The request was made but no response was received + throw new ZkSyncNodePluginError(`No response received for ${owner}/${repo}. Error: ${error.message}`); + } else { + // Something happened in setting up the request that triggered an Error + throw new ZkSyncNodePluginError(`Failed to set up the request for ${owner}/${repo}: ${error.message}`); + } + } +} + +// Get the asset to download from the latest release of the era-test-node binary +export async function getAssetToDownload(latestRelease: any): Promise { + const prefix = "era_test_node-" + latestRelease.tag_name; + const expectedAssetName = `${prefix}-${getArch()}-${getPlatform()}.tar.gz`; + + return latestRelease.assets.find((asset: any) => asset.name === expectedAssetName); +} + +function isTarGzFile(filePath: string): boolean { + return path.extname(filePath) === '.gz' && path.extname(path.basename(filePath, '.gz')) === '.tar'; +} + +function ensureTarGzExtension(filePath: string): string { + return filePath.endsWith(".tar.gz") ? filePath : filePath + ".tar.gz"; +} + +async function ensureDirectory(filePath: string): Promise { + await fse.ensureDir(path.dirname(filePath)); +} + +async function moveFile(sourcePath: string, destinationPath: string): Promise { + await fse.move(sourcePath, destinationPath, { overwrite: true }); +} + +function resolveTempFileName(filePath: string): string { + const { dir, ext, name } = path.parse(filePath); + + return path.format({ + dir, + ext, + name: `${TEMP_FILE_PREFIX}${name}`, + }); +} + +// Extracts the contents of a tar.gz archive to a file +async function extractTarGz(tmpFilePath: string, filePath: string): Promise { + const tempExtractionDir = path.join(path.dirname(tmpFilePath), `tmp_extract_${Date.now()}`); + await fse.ensureDir(tempExtractionDir); + + // Using native tar command for extraction + await new Promise((resolve, reject) => { + exec(`tar -xzf ${tmpFilePath} -C ${tempExtractionDir}`, (error, stdout, stderr) => { + if (error) { + reject(error); + } else { + resolve(stdout); + } + }); + }); + + const filesInTempExtractionDir = await fse.readdir(tempExtractionDir); + if (filesInTempExtractionDir.length !== 1) { + throw new Error('Expected a single file inside the tar.gz archive.'); + } + + const extractedFileName = filesInTempExtractionDir[0]; + const extractedFilePath = path.join(tempExtractionDir, extractedFileName); + + await moveFile(extractedFilePath, filePath.slice(0, -'.tar.gz'.length)); + await fse.remove(tempExtractionDir); +} + +// Downloads a file from a url and saves it to a file path +export async function download( + url: string, + filePath: string, + userAgent: string, + version: string, + timeoutMillis = 10000, + extraHeaders: { [name: string]: string } = {} +) { + const { pipeline } = await import("stream"); + const { getGlobalDispatcher, request } = await import("undici"); + const streamPipeline = util.promisify(pipeline); + + let dispatcher: Dispatcher = getGlobalDispatcher(); + + // Fetch the url + const response = await request(url, { + dispatcher, + headersTimeout: timeoutMillis, + maxRedirections: 10, + method: "GET", + headers: { + ...extraHeaders, + "User-Agent": `${userAgent} ${version}`, + }, + }); + + if (response.statusCode >= 200 && response.statusCode <= 299) { + const tmpFilePath = resolveTempFileName(filePath); + await ensureDirectory(filePath); + await streamPipeline(response.body, fs.createWriteStream(tmpFilePath)); + + if 
(isTarGzFile(url)) { + filePath = ensureTarGzExtension(filePath); + await extractTarGz(tmpFilePath, filePath); + } else { + await moveFile(tmpFilePath, filePath); + } + + await fse.remove(tmpFilePath); + return; + } + + // undici's response bodies must always be consumed to prevent leaks + const text = await response.body.text(); + + // eslint-disable-next-line + throw new Error( + `Failed to download ${url} - ${response.statusCode} received. ${text}` + ); +} diff --git a/yarn.lock b/yarn.lock index 28d1da6f1..90a3a49ec 100644 --- a/yarn.lock +++ b/yarn.lock @@ -719,7 +719,9 @@ version "0.0.1" dependencies: "@matterlabs/hardhat-zksync-solc" "0.4.2" + axios "^1.4.0" chalk "4.1.2" + fs-extra "^11.1.1" ts-morph "^19.0.0" "@matterlabs/hardhat-zksync-solc@0.4.1": From 08f7b52dc2f3c43563f4c2b480071eebcf3efdfd Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Fri, 29 Sep 2023 14:28:30 +0200 Subject: [PATCH 03/17] Run prettier --- packages/hardhat-zksync-node/src/constants.ts | 2 +- .../hardhat-zksync-node/src/downloader.ts | 12 +- packages/hardhat-zksync-node/src/index.ts | 119 +++++------------- packages/hardhat-zksync-node/src/server.ts | 2 +- packages/hardhat-zksync-node/src/types.ts | 2 +- packages/hardhat-zksync-node/src/utils.ts | 36 +++--- packages/hardhat-zksync-node/test/tests.ts | 2 +- 7 files changed, 60 insertions(+), 115 deletions(-) diff --git a/packages/hardhat-zksync-node/src/constants.ts b/packages/hardhat-zksync-node/src/constants.ts index 1a7b89eaf..04a911eef 100644 --- a/packages/hardhat-zksync-node/src/constants.ts +++ b/packages/hardhat-zksync-node/src/constants.ts @@ -22,7 +22,7 @@ export const PLATFORM_MAP: Record = { win32: 'windows', }; -export const TEMP_FILE_PREFIX = "tmp-"; +export const TEMP_FILE_PREFIX = 'tmp-'; // export const TOOLCHAIN_MAP: Record = { // linux: '-musl', diff --git a/packages/hardhat-zksync-node/src/downloader.ts b/packages/hardhat-zksync-node/src/downloader.ts index f6e912d78..f850d35e2 100644 --- a/packages/hardhat-zksync-node/src/downloader.ts +++ b/packages/hardhat-zksync-node/src/downloader.ts @@ -1,9 +1,9 @@ -import path from "path"; -import fs from "fs"; +import path from 'path'; +import fs from 'fs'; import fse from 'fs-extra'; -import { download } from "./utils"; -import { ZkSyncNodePluginError } from "./errors"; -import { PLUGIN_NAME } from "./constants"; +import { download } from './utils'; +import { ZkSyncNodePluginError } from './errors'; +import { PLUGIN_NAME } from './constants'; export class RPCServerDownloader { private readonly _binaryDir: string; @@ -35,4 +35,4 @@ export class RPCServerDownloader { const binaryPath = this.getBinaryPath(); fse.chmodSync(binaryPath, 0o755); } -} \ No newline at end of file +} diff --git a/packages/hardhat-zksync-node/src/index.ts b/packages/hardhat-zksync-node/src/index.ts index b7f4e18bf..996fe4451 100644 --- a/packages/hardhat-zksync-node/src/index.ts +++ b/packages/hardhat-zksync-node/src/index.ts @@ -1,17 +1,21 @@ -import { task, subtask, types } from "hardhat/config"; +import { task, subtask, types } from 'hardhat/config'; -import { PLUGIN_NAME, TASK_NODE_ZKSYNC, TASK_NODE_ZKSYNC_CREATE_SERVER, TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, ZKNODE_BIN_OWNER, ZKNODE_BIN_REPOSITORY_NAME } from "./constants"; -import { JsonRpcServer } from "./server"; -import { constructCommandArgs, getAssetToDownload, getLatestRelease, getRPCServerBinariesDir } from "./utils"; -import { RPCServerDownloader } from "./downloader"; -import { ZkSyncNodePluginError } from "./errors"; +import { + PLUGIN_NAME, + TASK_NODE_ZKSYNC, 
+ TASK_NODE_ZKSYNC_CREATE_SERVER, + TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, + ZKNODE_BIN_OWNER, + ZKNODE_BIN_REPOSITORY_NAME, +} from './constants'; +import { JsonRpcServer } from './server'; +import { constructCommandArgs, getAssetToDownload, getLatestRelease, getRPCServerBinariesDir } from './utils'; +import { RPCServerDownloader } from './downloader'; +import { ZkSyncNodePluginError } from './errors'; // Subtask to download the binary -subtask(TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, "Downloads the JSON-RPC server binary") - .addFlag( - "force", - "Force download even if the binary already exists" - ) +subtask(TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, 'Downloads the JSON-RPC server binary') + .addFlag('force', 'Force download even if the binary already exists') .setAction( async ( { @@ -29,7 +33,7 @@ subtask(TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, "Downloads the JSON-RPC server binary" const downloader: RPCServerDownloader = new RPCServerDownloader(rpcServerBinaryDir, latestRelease.tag_name); // Check if the binary is already downloaded - if (!force && await downloader.isDownloaded()) { + if (!force && (await downloader.isDownloaded())) { return downloader.getBinaryPath(); } @@ -42,13 +46,8 @@ subtask(TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, "Downloads the JSON-RPC server binary" ); // Subtask to create the server -subtask(TASK_NODE_ZKSYNC_CREATE_SERVER, "Creates a JSON-RPC server for zkSync node") - .addParam( - "binaryPath", - "Path to the binary file", - undefined, - types.string - ) +subtask(TASK_NODE_ZKSYNC_CREATE_SERVER, 'Creates a JSON-RPC server for zkSync node') + .addParam('binaryPath', 'Path to the binary file', undefined, types.string) .setAction( async ( { @@ -66,71 +65,19 @@ subtask(TASK_NODE_ZKSYNC_CREATE_SERVER, "Creates a JSON-RPC server for zkSync no ); // Main task of the plugin. It starts the server and listens for requests. 
-task(TASK_NODE_ZKSYNC, "Starts a JSON-RPC server for zkSync node") - .addOptionalParam( - "log", - "Logging level (error, warn, info, debug)", - undefined, - types.string - ) - .addOptionalParam( - "logFilePath", - "Path to the file where logs should be written", - undefined, - types.string - ) - .addOptionalParam( - "cache", - "Cache network request (none, disk, memory)", - undefined, - types.string - ) - .addOptionalParam( - "cacheDir", - "Path to the directory where cache should be stored", - undefined, - types.string - ) - .addFlag( - "resetCache", - "Reset cache before start", - ) - .addOptionalParam( - "fork", - "Fork from the specified network (testnet, mainnet)", - undefined, - types.string - ) - .addOptionalParam( - "showSorageLogs", - "Show storage logs (none, read, write, all)", - undefined, - types.string - ) - .addOptionalParam( - "showVmDetails", - "Show VM details (none, all)", - undefined, - types.string - ) - .addOptionalParam( - "showGasDetails", - "Show gas details (none, all)", - undefined, - types.string - ) - .addFlag( - "showCalls", - "Print more detailed call traces" - ) - .addFlag( - "resolveHashes", - "Ask openchain for ABI names" - ) - .addFlag( - "force", - "Force download even if the binary already exists" - ) +task(TASK_NODE_ZKSYNC, 'Starts a JSON-RPC server for zkSync node') + .addOptionalParam('log', 'Logging level (error, warn, info, debug)', undefined, types.string) + .addOptionalParam('logFilePath', 'Path to the file where logs should be written', undefined, types.string) + .addOptionalParam('cache', 'Cache network request (none, disk, memory)', undefined, types.string) + .addOptionalParam('cacheDir', 'Path to the directory where cache should be stored', undefined, types.string) + .addFlag('resetCache', 'Reset cache before start') + .addOptionalParam('fork', 'Fork from the specified network (testnet, mainnet)', undefined, types.string) + .addOptionalParam('showSorageLogs', 'Show storage logs (none, read, write, all)', undefined, types.string) + .addOptionalParam('showVmDetails', 'Show VM details (none, all)', undefined, types.string) + .addOptionalParam('showGasDetails', 'Show gas details (none, all)', undefined, types.string) + .addFlag('showCalls', 'Print more detailed call traces') + .addFlag('resolveHashes', 'Ask openchain for ABI names') + .addFlag('force', 'Force download even if the binary already exists') .setAction( async ( { @@ -158,9 +105,7 @@ task(TASK_NODE_ZKSYNC, "Starts a JSON-RPC server for zkSync node") showCalls: boolean; resolveHashes: boolean; }, - { - run, - } + { run } ) => { const commandArgs = constructCommandArgs({ log, diff --git a/packages/hardhat-zksync-node/src/server.ts b/packages/hardhat-zksync-node/src/server.ts index 8cd373496..9c5643b27 100644 --- a/packages/hardhat-zksync-node/src/server.ts +++ b/packages/hardhat-zksync-node/src/server.ts @@ -20,7 +20,7 @@ export class JsonRpcServer { console.info(chalk.yellow(`Received ${error.signal} signal. 
The server process has exited.`)); return; } - throw new Error(`The server process has exited with an error: ${error.message}`); + throw new Error(`The server process has exited with an error: ${error.message}`); } } } diff --git a/packages/hardhat-zksync-node/src/types.ts b/packages/hardhat-zksync-node/src/types.ts index c618d6cc6..6f037ef61 100644 --- a/packages/hardhat-zksync-node/src/types.ts +++ b/packages/hardhat-zksync-node/src/types.ts @@ -10,4 +10,4 @@ export interface CommandArguments { showGasDetails?: string; showCalls?: boolean; resolveHashes?: boolean; -} \ No newline at end of file +} diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index e28627754..136622f44 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -1,10 +1,10 @@ import path from 'path'; import axios from 'axios'; -import util from "util"; -import fs from "fs"; +import util from 'util'; +import fs from 'fs'; import fse from 'fs-extra'; import { exec } from 'child_process'; -import type { Dispatcher } from "undici"; +import type { Dispatcher } from 'undici'; import { ALLOWED_CACHE_VALUES, @@ -14,10 +14,10 @@ import { ALLOWED_SHOW_STORAGE_LOGS_VALUES, ALLOWED_SHOW_VM_DETAILS_VALUES, PLATFORM_MAP, - TEMP_FILE_PREFIX -} from "./constants"; -import { ZkSyncNodePluginError } from "./errors"; -import { CommandArguments } from "./types"; + TEMP_FILE_PREFIX, +} from './constants'; +import { ZkSyncNodePluginError } from './errors'; +import { CommandArguments } from './types'; import { getCompilersDir } from 'hardhat/internal/util/global-dir'; @@ -118,7 +118,7 @@ export async function getLatestRelease(owner: string, repo: string, userAgent: s const response = await axios.get(url, { headers: { 'User-Agent': userAgent, - } + }, }); return response.data; @@ -126,7 +126,9 @@ export async function getLatestRelease(owner: string, repo: string, userAgent: s if (error.response) { // The request was made and the server responded with a status code outside of the range of 2xx throw new ZkSyncNodePluginError( - `Failed to get latest release for ${owner}/${repo}. Status: ${error.response.status}, Data: ${JSON.stringify(error.response.data)}` + `Failed to get latest release for ${owner}/${repo}. Status: ${ + error.response.status + }, Data: ${JSON.stringify(error.response.data)}` ); } else if (error.request) { // The request was made but no response was received @@ -140,7 +142,7 @@ export async function getLatestRelease(owner: string, repo: string, userAgent: s // Get the asset to download from the latest release of the era-test-node binary export async function getAssetToDownload(latestRelease: any): Promise { - const prefix = "era_test_node-" + latestRelease.tag_name; + const prefix = 'era_test_node-' + latestRelease.tag_name; const expectedAssetName = `${prefix}-${getArch()}-${getPlatform()}.tar.gz`; return latestRelease.assets.find((asset: any) => asset.name === expectedAssetName); @@ -151,7 +153,7 @@ function isTarGzFile(filePath: string): boolean { } function ensureTarGzExtension(filePath: string): string { - return filePath.endsWith(".tar.gz") ? filePath : filePath + ".tar.gz"; + return filePath.endsWith('.tar.gz') ? 
filePath : filePath + '.tar.gz'; } async function ensureDirectory(filePath: string): Promise { @@ -209,8 +211,8 @@ export async function download( timeoutMillis = 10000, extraHeaders: { [name: string]: string } = {} ) { - const { pipeline } = await import("stream"); - const { getGlobalDispatcher, request } = await import("undici"); + const { pipeline } = await import('stream'); + const { getGlobalDispatcher, request } = await import('undici'); const streamPipeline = util.promisify(pipeline); let dispatcher: Dispatcher = getGlobalDispatcher(); @@ -220,10 +222,10 @@ export async function download( dispatcher, headersTimeout: timeoutMillis, maxRedirections: 10, - method: "GET", + method: 'GET', headers: { ...extraHeaders, - "User-Agent": `${userAgent} ${version}`, + 'User-Agent': `${userAgent} ${version}`, }, }); @@ -247,7 +249,5 @@ export async function download( const text = await response.body.text(); // eslint-disable-next-line - throw new Error( - `Failed to download ${url} - ${response.statusCode} received. ${text}` - ); + throw new Error(`Failed to download ${url} - ${response.statusCode} received. ${text}`); } diff --git a/packages/hardhat-zksync-node/test/tests.ts b/packages/hardhat-zksync-node/test/tests.ts index f7402cb3b..27e7cd6d2 100644 --- a/packages/hardhat-zksync-node/test/tests.ts +++ b/packages/hardhat-zksync-node/test/tests.ts @@ -13,4 +13,4 @@ describe('zksolc plugin', async function () { await this.env.run(TASK_NODE_ZKSYNC); }); }); -}); \ No newline at end of file +}); From 5f1256030d67bdec939398e0650f8238e32e5158 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Fri, 29 Sep 2023 14:43:50 +0200 Subject: [PATCH 04/17] Raise an error if plafrom is windows --- packages/hardhat-zksync-node/src/utils.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index 136622f44..7892430e0 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -142,8 +142,15 @@ export async function getLatestRelease(owner: string, repo: string, userAgent: s // Get the asset to download from the latest release of the era-test-node binary export async function getAssetToDownload(latestRelease: any): Promise { + const platform = getPlatform(); + + // TODO: Add support for Windows + if (platform === 'windows' || platform === '') { + throw new ZkSyncNodePluginError(`Unsupported platform: ${platform}`); + } + const prefix = 'era_test_node-' + latestRelease.tag_name; - const expectedAssetName = `${prefix}-${getArch()}-${getPlatform()}.tar.gz`; + const expectedAssetName = `${prefix}-${getArch()}-${platform}.tar.gz`; return latestRelease.assets.find((asset: any) => asset.name === expectedAssetName); } From 16fe745377a99832d08cb4abca1b2be43e22a8d3 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Mon, 2 Oct 2023 17:34:53 +0200 Subject: [PATCH 05/17] Add more tests to hardhat-zksync-node plugin --- packages/hardhat-zksync-node/package.json | 3 + .../{hardhat.config.js => hardhat.config.ts} | 0 .../fixture-projects/simple/runHardhatTask.js | 13 + packages/hardhat-zksync-node/test/helpers.ts | 24 -- packages/hardhat-zksync-node/test/tests.ts | 401 +++++++++++++++++- yarn.lock | 101 ++++- 6 files changed, 509 insertions(+), 33 deletions(-) rename packages/hardhat-zksync-node/test/fixture-projects/simple/{hardhat.config.js => hardhat.config.ts} (100%) create mode 100644 packages/hardhat-zksync-node/test/fixture-projects/simple/runHardhatTask.js delete mode 
100644 packages/hardhat-zksync-node/test/helpers.ts diff --git a/packages/hardhat-zksync-node/package.json b/packages/hardhat-zksync-node/package.json index 73dd765ce..aae601032 100644 --- a/packages/hardhat-zksync-node/package.json +++ b/packages/hardhat-zksync-node/package.json @@ -43,6 +43,7 @@ "@types/fs-extra": "^5.1.0", "@types/mocha": "^9.1.0", "@types/node": "^18.11.17", + "@types/proxyquire": "^1.3.29", "@typescript-eslint/eslint-plugin": "5.13.0", "@typescript-eslint/parser": "5.13.0", "chai": "^4.3.6", @@ -54,7 +55,9 @@ "hardhat": "^2.14.0", "mocha": "^9.2.1", "prettier": "2.5.1", + "proxyquire": "^2.1.3", "rimraf": "^3.0.2", + "sinon": "^16.0.0", "ts-node": "^10.6.0", "typescript": "^4.6.2", "zksync-web3": "^0.14.3" diff --git a/packages/hardhat-zksync-node/test/fixture-projects/simple/hardhat.config.js b/packages/hardhat-zksync-node/test/fixture-projects/simple/hardhat.config.ts similarity index 100% rename from packages/hardhat-zksync-node/test/fixture-projects/simple/hardhat.config.js rename to packages/hardhat-zksync-node/test/fixture-projects/simple/hardhat.config.ts diff --git a/packages/hardhat-zksync-node/test/fixture-projects/simple/runHardhatTask.js b/packages/hardhat-zksync-node/test/fixture-projects/simple/runHardhatTask.js new file mode 100644 index 000000000..ca40386c8 --- /dev/null +++ b/packages/hardhat-zksync-node/test/fixture-projects/simple/runHardhatTask.js @@ -0,0 +1,13 @@ +// const path = require('path'); +// process.chdir(path.join(__dirname)); + +const hre = require("hardhat"); +const TASK_NODE_ZKSYNC = 'node-zksync'; // This constant can be imported or directly defined here + +async function runTask(taskName) { + await hre.run(taskName); +} + +const taskName = process.argv[2] || TASK_NODE_ZKSYNC; + +runTask(taskName); \ No newline at end of file diff --git a/packages/hardhat-zksync-node/test/helpers.ts b/packages/hardhat-zksync-node/test/helpers.ts deleted file mode 100644 index 9a67c0faf..000000000 --- a/packages/hardhat-zksync-node/test/helpers.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { resetHardhatContext } from 'hardhat/plugins-testing'; -import { HardhatRuntimeEnvironment } from 'hardhat/types'; -import { TASK_CLEAN } from 'hardhat/builtin-tasks/task-names'; -import path from 'path'; - -declare module 'mocha' { - interface Context { - env: HardhatRuntimeEnvironment; - } -} - -export function useEnvironment(fixtureProjectName: string, networkName = 'hardhat') { - beforeEach('Loading hardhat environment', function () { - process.chdir(path.join(__dirname, 'fixture-projects', fixtureProjectName)); - process.env.HARDHAT_NETWORK = networkName; - - this.env = require('hardhat'); - // this.env.run(TASK_CLEAN); - }); - - afterEach('Resetting hardhat', function () { - resetHardhatContext(); - }); -} diff --git a/packages/hardhat-zksync-node/test/tests.ts b/packages/hardhat-zksync-node/test/tests.ts index 27e7cd6d2..d74c7c1d7 100644 --- a/packages/hardhat-zksync-node/test/tests.ts +++ b/packages/hardhat-zksync-node/test/tests.ts @@ -1,16 +1,401 @@ -import { assert } from 'chai'; +import { expect, assert } from 'chai'; import chalk from 'chalk'; +import sinon from 'sinon'; +import axios from 'axios'; +import fs from 'fs'; +import path from 'path'; +import proxyquire from 'proxyquire'; +import { spawn, ChildProcess } from "child_process"; -import { TASK_NODE_ZKSYNC } from '../src/constants'; +import * as utils from '../src/utils'; +import { constructCommandArgs, getLatestRelease, getAssetToDownload, download } from '../src/utils'; +import { 
RPCServerDownloader } from '../src/downloader'; +import { TASK_NODE_ZKSYNC, PROCESS_TERMINATION_SIGNALS } from '../src/constants'; -import { useEnvironment } from './helpers'; +describe('node-zksync plugin', async function () { + describe('Utils', () => { + describe('constructCommandArgs', () => { + it('should correctly construct command arguments', () => { + const args = { + log: 'error', + logFilePath: '/path/to/log', + cache: 'disk', + cacheDir: '/path/to/cache', + resetCache: true, + fork: 'mainnet', + showStorageLogs: 'all', + showVmDetails: 'none', + showGasDetails: 'all', + showCalls: true, + resolveHashes: true + }; -describe('zksolc plugin', async function () { - describe('Simple', async function () { - useEnvironment('simple'); + const result = constructCommandArgs(args); + expect(result).to.deep.equal([ + '--log=error', + '--log-file-path=/path/to/log', + '--cache=disk', + '--cache-dir=/path/to/cache', + '--reset-cache', + '--fork=mainnet', + '--show-storage-logs=all', + '--show-vm-details=none', + '--show-gas-details=all', + '--show-calls', + '--resolve-hashes', + 'run' + ]); + }); + }); + + describe('getAssetToDownload', () => { + let archStub: sinon.SinonStub; + let platformStub: sinon.SinonStub; + + const mockRelease = { + tag_name: 'v0.1.0', + assets: [ + { name: 'era_test_node-v0.1.0-aarch64-apple-darwin.tar.gz' }, + { name: 'era_test_node-v0.1.0-x86_64-apple-darwin.tar.gz' }, + { name: 'era_test_node-v0.1.0-x86_64-unknown-linux-gnu.tar.gz' } + ] + }; + + beforeEach(() => { + archStub = sinon.stub(process, 'arch'); + platformStub = sinon.stub(process, 'platform'); + }); + + afterEach(() => { + archStub.restore(); + platformStub.restore(); + }); + + it('should return the correct asset for x64 apple-darwin', async () => { + archStub.value('x64'); + platformStub.value('darwin'); + expect(await getAssetToDownload(mockRelease)).to.deep.equal(mockRelease.assets[1]); + }); + + it('should return the correct asset for aarch64 apple-darwin', async () => { + archStub.value('arm64'); + platformStub.value('darwin'); + expect(await getAssetToDownload(mockRelease)).to.deep.equal(mockRelease.assets[0]); + }); + + it('should return the correct asset for x64 linux', async () => { + archStub.value('x64'); + platformStub.value('linux'); + expect(await getAssetToDownload(mockRelease)).to.deep.equal(mockRelease.assets[2]); + }); + + it('should throw an error for unsupported platform', async () => { + archStub.value('x64'); + platformStub.value('win32'); + try { + await getAssetToDownload(mockRelease); + throw new Error("Expected an error to be thrown, but it wasn't."); + } catch (error: any) { + expect(error.message).to.include("Unsupported platform"); + } + }); + }); + + describe('getLatestRelease', () => { + let axiosGetStub: sinon.SinonStub; + + const mockRelease = { + assets: [ + { + url: "https://api.github.com/repos/matter-labs/era-test-node/releases/assets/1", + browser_download_url: "https://github.com/matter-labs/era-test-node/releases/download/v0.1.0/era_test_node-v0.1.0-aarch64-apple-darwin.tar.gz", + }, + { + url: "https://api.github.com/repos/matter-labs/era-test-node/releases/assets/2", + browser_download_url: "https://github.com/matter-labs/era-test-node/releases/download/v0.1.0/era_test_node-v0.1.0-x86_64-apple-darwin.tar.gz", + }, + { + url: "https://api.github.com/repos/matter-labs/era-test-node/releases/assets/3", + browser_download_url: 
"https://github.com/matter-labs/era-test-node/releases/download/v0.1.0/era_test_node-v0.1.0-x86_64-unknown-linux-gnu.tar.gz", + } + ] + }; + + beforeEach(() => { + axiosGetStub = sinon.stub(axios, 'get'); + }); + + afterEach(() => { + axiosGetStub.restore(); + }); + + it('should fetch the latest release successfully', async () => { + axiosGetStub.resolves({ data: mockRelease }); + + const result = await getLatestRelease('owner', 'repo', 'userAgent'); + expect(result).to.deep.equal(mockRelease); + + sinon.assert.calledOnce(axiosGetStub); + }); + + it('should handle errors when the server responds with a non-2xx status code', async () => { + const errorResponse = { + response: { + status: 404, + data: { + message: "Not Found" + } + } + }; + + axiosGetStub.rejects(errorResponse); + + try { + await getLatestRelease('owner', 'repo', 'userAgent'); + assert.fail("Expected an error to be thrown"); + } catch (error: any) { + expect(error.message).to.include("Failed to get latest release"); + expect(error.message).to.include("404"); + expect(error.message).to.include("Not Found"); + } + }); + + it('should handle errors when no response is received', async () => { + const errorNoResponse = { + request: {}, + message: "No response" + }; + + axiosGetStub.rejects(errorNoResponse); + + try { + await getLatestRelease('owner', 'repo', 'userAgent'); + assert.fail("Expected an error to be thrown"); + } catch (error: any) { + expect(error.message).to.include("No response received"); + } + }); + + it('should handle errors during request setup', async () => { + const errorSetup = { + message: "Setup error" + }; + + axiosGetStub.rejects(errorSetup); + + try { + await getLatestRelease('owner', 'repo', 'userAgent'); + assert.fail("Expected an error to be thrown"); + } catch (error: any) { + expect(error.message).to.include("Failed to set up the request"); + } + }); + }); + }); + + describe('RPCServerDownloader', () => { + let downloadStub: sinon.SinonStub; + let existsSyncStub: sinon.SinonStub; + let postProcessDownloadStub: sinon.SinonStub; + + beforeEach(() => { + downloadStub = sinon.stub(utils, 'download'); + existsSyncStub = sinon.stub(fs, 'existsSync'); + postProcessDownloadStub = sinon.stub(RPCServerDownloader.prototype as any, '_postProcessDownload').resolves(); // Stubbing the private method + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('isDownloaded', () => { + + it('should return true if binary exists', async () => { + const downloader = new RPCServerDownloader('/path/to/dir', 'version'); + existsSyncStub.returns(true); + + const result = await downloader.isDownloaded(); + expect(result).to.be.true; + }); + + it('should return false if binary does not exist', async () => { + const downloader = new RPCServerDownloader('/path/to/dir', 'version'); + existsSyncStub.returns(false); + + const result = await downloader.isDownloaded(); + expect(result).to.be.false; + }); + + }); + + describe('download', () => { + + it('should download the binary if not already downloaded', async () => { + const downloader = new RPCServerDownloader('/path/to/dir', 'version'); + existsSyncStub.returns(false); + + await downloader.download('http://example.com/binary'); - it('Should successfully compile a simple contract', async function () { - await this.env.run(TASK_NODE_ZKSYNC); + sinon.assert.calledOnce(downloadStub); + }); + + it('should throw an error if download fails', async () => { + const downloader = new RPCServerDownloader('/path/to/dir', 'version'); + downloadStub.throws(new Error('Mocked 
download failure')); + + try { + await downloader.download('http://example.com/binary'); + expect.fail('Expected an error to be thrown'); + } catch (error: any) { + expect(error.message).to.contain('Error downloading binary from URL'); + } + }); + + }); + + describe('getBinaryPath', () => { + it('should return the correct binary path', () => { + const downloader = new RPCServerDownloader('/path/to/dir', 'version'); + + const result = downloader.getBinaryPath(); + expect(result).to.equal('/path/to/dir/version'); + }); + }); + + }); + + describe('JsonRpcServer', () => { + interface ExecSyncError extends Error { + signal?: string; + } + + const execSyncStub = sinon.stub(); + let consoleInfoStub: sinon.SinonStub; + + // Because we cannot stub the execSync method directly, we use proxyquire to stub the entire 'child_process' module + const { JsonRpcServer } = proxyquire('../src/server', { + 'child_process': { execSync: execSyncStub } + }); + + beforeEach(() => { + execSyncStub.reset(); + consoleInfoStub = sinon.stub(console, 'info'); }); + + afterEach(() => { + consoleInfoStub.restore(); + }); + + describe('listen', () => { + it('should start the JSON-RPC server with the provided arguments', () => { + const server = new JsonRpcServer('/path/to/binary'); + const args = ['--arg1=value1', '--arg2=value2']; + + server.listen(args); + + sinon.assert.calledWith(execSyncStub, '/path/to/binary --arg1=value1 --arg2=value2'); + }); + + it('should print a starting message when server starts', () => { + const server = new JsonRpcServer('/path/to/binary'); + server.listen(); + + sinon.assert.calledWith(consoleInfoStub, chalk.green('Starting the JSON-RPC server with command: /path/to/binary ')); + }); + + it('should handle termination signals gracefully', () => { + const server = new JsonRpcServer('/path/to/binary'); + const error = new Error('Mocked error') as ExecSyncError; + error.signal = PROCESS_TERMINATION_SIGNALS[0]; // Let's simulate the first signal, e.g., 'SIGINT' + execSyncStub.throws(error); + + try { + server.listen(); + } catch (e) { + // We don't expect an error to be thrown for termination signals + expect.fail('Did not expect an error to be thrown'); + } + + sinon.assert.calledWith(consoleInfoStub, chalk.yellow(`Received ${PROCESS_TERMINATION_SIGNALS[0]} signal. 
The server process has exited.`)); + }); + + it('should throw an error if the server process exits with an error', () => { + const server = new JsonRpcServer('/path/to/binary'); + const error = new Error('Mocked error'); + execSyncStub.throws(error); + + try { + server.listen(); + expect.fail('Expected an error to be thrown'); + } catch (error: any) { + expect(error.message).to.equal('The server process has exited with an error: Mocked error'); + } + }); + }); + }); + + describe('Testing task', function () { + this.timeout(10000); // Increase timeout if needed + + let serverProcess: ChildProcess; + + function delay(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)); + } + + afterEach(() => { + if (serverProcess) { + serverProcess.kill(); + } + }); + + it('Should successfully start the server', async function () { + // Start the server by running the task in a child process + serverProcess = spawn('ts-node', ['runHardhatTask.js', TASK_NODE_ZKSYNC], { + cwd: path.join(__dirname, 'fixture-projects', 'simple'), + }); + + // Send SIGINT to the serverProcess + serverProcess.kill(PROCESS_TERMINATION_SIGNALS[0] as NodeJS.Signals); + + // Wait for the server process to exit gracefully (i.e., not due to an error) + return await new Promise((resolve, reject) => { + serverProcess.on('exit', (code, signal) => { + if (signal === PROCESS_TERMINATION_SIGNALS[0]) { + resolve(); + } else { + reject(new Error(`Process was terminated by unexpected signal: ${signal}`)); + } + }); + }); + }); + + // it.only('Should return the correct chainID', async function () { + // const rpcUrl: string = 'http://localhost:8011'; + // const requestData = { + // jsonrpc: "2.0", + // id: 1, + // method: "eth_chainId", + // params: [] + // }; + + // serverProcess = spawn('ts-node', ['runHardhatTask.js', TASK_NODE_ZKSYNC], { + // cwd: path.join(__dirname, 'fixture-projects', 'simple'), + // }); + + + // await delay(2000); + + // try { + // const response = await axios.post(rpcUrl, requestData); + // const chainId: string = response.data.result; + + // assert.strictEqual(chainId, '0x104', 'Unexpected chainId received from the server'); + // } catch (error: any) { + // throw new Error(`Failed to get chainId from the server: ${error.message}`); + // } finally { + // serverProcess.kill(PROCESS_TERMINATION_SIGNALS[0] as NodeJS.Signals); + // } + // }); }); }); diff --git a/yarn.lock b/yarn.lock index 90a3a49ec..02e96f1d5 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1246,6 +1246,13 @@ dependencies: type-detect "4.0.8" +"@sinonjs/commons@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.0.tgz#beb434fe875d965265e04722ccfc21df7f755d72" + integrity sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA== + dependencies: + type-detect "4.0.8" + "@sinonjs/fake-timers@10.0.2": version "10.0.2" resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.0.2.tgz#d10549ed1f423d80639c528b6c7f5a1017747d0c" @@ -1253,6 +1260,13 @@ dependencies: "@sinonjs/commons" "^2.0.0" +"@sinonjs/fake-timers@^10.0.2", "@sinonjs/fake-timers@^10.3.0": + version "10.3.0" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz#55fdff1ecab9f354019129daf4df0dd4d923ea66" + integrity sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== + dependencies: + "@sinonjs/commons" "^3.0.0" + "@sinonjs/fake-timers@^7.0.4": version "7.1.2" resolved 
"https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-7.1.2.tgz#2524eae70c4910edccf99b2f4e6efc5894aff7b5" @@ -1269,6 +1283,15 @@ lodash.get "^4.4.2" type-detect "^4.0.8" +"@sinonjs/samsam@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-8.0.0.tgz#0d488c91efb3fa1442e26abea81759dfc8b5ac60" + integrity sha512-Bp8KUVlLp8ibJZrnvq2foVhP0IVX2CIprMJPK0vqGqgrDa0OHVKeZyBykqskkrdxV6yKBPmGasO8LVjAKR3Gew== + dependencies: + "@sinonjs/commons" "^2.0.0" + lodash.get "^4.4.2" + type-detect "^4.0.8" + "@sinonjs/text-encoding@^0.7.1": version "0.7.2" resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz#5981a8db18b56ba38ef0efb7d995b12aa7b51918" @@ -1471,6 +1494,11 @@ dependencies: "@types/retry" "*" +"@types/proxyquire@^1.3.29": + version "1.3.29" + resolved "https://registry.yarnpkg.com/@types/proxyquire/-/proxyquire-1.3.29.tgz#bfcc85e993d4544c670b8973bbef4a90c7e42c74" + integrity sha512-8/JYXN9NmE4tEGUU/JI7FcvloTu7CxYkb01h4kI+HRvABxwpleLXsvVmOF85LlgEb/xwe+H8MwM4s3ushNuffg== + "@types/readable-stream@^2.3.13": version "2.3.15" resolved "https://registry.yarnpkg.com/@types/readable-stream/-/readable-stream-2.3.15.tgz#3d79c9ceb1b6a57d5f6e6976f489b9b5384321ae" @@ -2690,7 +2718,7 @@ diff@^4.0.1: resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== -diff@^5.0.0: +diff@^5.0.0, diff@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/diff/-/diff-5.1.0.tgz#bc52d298c5ea8df9194800224445ed43ffc87e40" integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== @@ -3334,6 +3362,14 @@ file-entry-cache@^6.0.1: dependencies: flat-cache "^3.0.4" +fill-keys@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/fill-keys/-/fill-keys-1.0.2.tgz#9a8fa36f4e8ad634e3bf6b4f3c8882551452eb20" + integrity sha512-tcgI872xXjwFF4xgQmLxi76GnwJG3g/3isB1l4/G5Z4zrbddGpBjqZCO9oEAcB5wX0Hj/5iQB3toxfO7in1hHA== + dependencies: + is-object "~1.0.1" + merge-descriptors "~1.0.0" + fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" @@ -3975,6 +4011,13 @@ is-core-module@^2.11.0: dependencies: has "^1.0.3" +is-core-module@^2.13.0: + version "2.13.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.0.tgz#bb52aa6e2cbd49a30c2ba68c42bf3435ba6072db" + integrity sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ== + dependencies: + has "^1.0.3" + is-core-module@^2.8.0: version "2.10.0" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" @@ -4033,6 +4076,11 @@ is-number@^7.0.0: resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== +is-object@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" + integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== + is-plain-obj@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" @@ 
-4478,6 +4526,11 @@ meow@^6.0.0: type-fest "^0.13.1" yargs-parser "^18.1.3" +merge-descriptors@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" @@ -4683,6 +4736,11 @@ module-error@^1.0.1, module-error@^1.0.2: resolved "https://registry.yarnpkg.com/module-error/-/module-error-1.0.2.tgz#8d1a48897ca883f47a45816d4fb3e3c6ba404d86" integrity sha512-0yuvsqSCv8LbaOKhnsQ/T5JhyFlCYLPXK3U2sgV10zoKQwzs/MyfuQUOZQ1V/6OCOJsK/TRgNVrPuPDqtdMFtA== +module-not-found-error@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/module-not-found-error/-/module-not-found-error-1.0.1.tgz#cf8b4ff4f29640674d6cdd02b0e3bc523c2bbdc0" + integrity sha512-pEk4ECWQXV6z2zjhRZUongnLJNUeGQJ3w6OQ5ctGwD+i5o93qjRQUk2Rt6VdNeu3sEP0AB4LcfvdebpxBRVr4g== + ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" @@ -4744,6 +4802,17 @@ nise@^5.1.2: just-extend "^4.0.2" path-to-regexp "^1.7.0" +nise@^5.1.4: + version "5.1.4" + resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.4.tgz#491ce7e7307d4ec546f5a659b2efe94a18b4bbc0" + integrity sha512-8+Ib8rRJ4L0o3kfmyVCL7gzrohyDe0cMFTBa2d364yIrEGMEoetznKJx899YxjybU6bL9SQkYPSBBs1gyYs8Xg== + dependencies: + "@sinonjs/commons" "^2.0.0" + "@sinonjs/fake-timers" "^10.0.2" + "@sinonjs/text-encoding" "^0.7.1" + just-extend "^4.0.2" + path-to-regexp "^1.7.0" + node-addon-api@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-2.0.2.tgz#432cfa82962ce494b132e9d72a15b29f71ff5d32" @@ -5136,6 +5205,15 @@ proxy-from-env@^1.1.0: resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== +proxyquire@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/proxyquire/-/proxyquire-2.1.3.tgz#2049a7eefa10a9a953346a18e54aab2b4268df39" + integrity sha512-BQWfCqYM+QINd+yawJz23tbBM40VIGXOdDw3X344KcclI/gtBbdWF6SlQ4nK/bYhF9d27KYug9WzljHC6B9Ysg== + dependencies: + fill-keys "^1.0.2" + module-not-found-error "^1.0.1" + resolve "^1.11.1" + pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" @@ -5332,6 +5410,15 @@ resolve@^1.10.0, resolve@^1.20.0: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" +resolve@^1.11.1: + version "1.22.6" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.6.tgz#dd209739eca3aef739c626fea1b4f3c506195362" + integrity sha512-njhxM7mV12JfufShqGy3Rz8j11RPdLy4xi15UurGJeoHLfJpVXKdh3ueuOqbYUcDZnffr6X739JBo5LzyahEsw== + dependencies: + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" @@ -5539,6 +5626,18 @@ sinon@^15.0.1: nise "^5.1.2" supports-color "^7.2.0" +sinon@^16.0.0: + version "16.0.0" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-16.0.0.tgz#06da4e63624b946c9d7e67cce21c2f67f40f23a9" + integrity 
sha512-B8AaZZm9CT5pqe4l4uWJztfD/mOTa7dL8Qo0W4+s+t74xECOgSZDDQCBjNgIK3+n4kyxQrSTv2V5ul8K25qkiQ== + dependencies: + "@sinonjs/commons" "^3.0.0" + "@sinonjs/fake-timers" "^10.3.0" + "@sinonjs/samsam" "^8.0.0" + diff "^5.1.0" + nise "^5.1.4" + supports-color "^7.2.0" + slash@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" From ab8e2878459c0d9fdda266582d03af429bc43c8e Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Tue, 3 Oct 2023 12:49:19 +0200 Subject: [PATCH 06/17] Add logs when downloading, expand task arguments --- packages/hardhat-zksync-node/package.json | 2 +- packages/hardhat-zksync-node/src/constants.ts | 1 + .../hardhat-zksync-node/src/downloader.ts | 5 ++ packages/hardhat-zksync-node/src/index.ts | 52 +++++++++++------- packages/hardhat-zksync-node/src/server.ts | 8 ++- packages/hardhat-zksync-node/src/types.ts | 8 ++- packages/hardhat-zksync-node/src/utils.ts | 45 ++++++++++++---- packages/hardhat-zksync-node/test/tests.ts | 53 +++++++++++++++---- 8 files changed, 134 insertions(+), 40 deletions(-) diff --git a/packages/hardhat-zksync-node/package.json b/packages/hardhat-zksync-node/package.json index aae601032..e03fd832e 100644 --- a/packages/hardhat-zksync-node/package.json +++ b/packages/hardhat-zksync-node/package.json @@ -1,7 +1,7 @@ { "name": "@matterlabs/hardhat-zksync-node", "version": "0.0.1", - "description": "Hardhat plugin to run zkSync node locally", + "description": "Hardhat plugin to run zkSync era-test-node locally", "repository": "github:matter-labs/hardhat-zksync", "homepage": "https://github.com/matter-labs/hardhat-zksync/tree/main/packages/hardhat-zksync-node", "author": "Matter Labs", diff --git a/packages/hardhat-zksync-node/src/constants.ts b/packages/hardhat-zksync-node/src/constants.ts index 04a911eef..10283f139 100644 --- a/packages/hardhat-zksync-node/src/constants.ts +++ b/packages/hardhat-zksync-node/src/constants.ts @@ -12,6 +12,7 @@ export const PROCESS_TERMINATION_SIGNALS = ['SIGINT', 'SIGTERM']; export const ALLOWED_LOG_VALUES = ['error', 'warn', 'info', 'debug']; export const ALLOWED_CACHE_VALUES = ['none', 'disk', 'memory']; export const ALLOWED_FORK_VALUES = ['testnet', 'mainnet']; +export const ALLOWED_SHOW_CALLS_VALUES = ['none', 'user', 'system', 'all']; export const ALLOWED_SHOW_STORAGE_LOGS_VALUES = ['none', 'read', 'write', 'all']; export const ALLOWED_SHOW_VM_DETAILS_VALUES = ['none', 'all']; export const ALLOWED_SHOW_GAS_DETAILS_VALUES = ['none', 'all']; diff --git a/packages/hardhat-zksync-node/src/downloader.ts b/packages/hardhat-zksync-node/src/downloader.ts index f850d35e2..1f78ffb39 100644 --- a/packages/hardhat-zksync-node/src/downloader.ts +++ b/packages/hardhat-zksync-node/src/downloader.ts @@ -4,6 +4,7 @@ import fse from 'fs-extra'; import { download } from './utils'; import { ZkSyncNodePluginError } from './errors'; import { PLUGIN_NAME } from './constants'; +import chalk from 'chalk'; export class RPCServerDownloader { private readonly _binaryDir: string; @@ -20,8 +21,12 @@ export class RPCServerDownloader { public async download(url: string): Promise { try { + console.info(chalk.yellow(`Downloading era-test-node binary, release: ${this._version}`)); + await download(url, this.getBinaryPath(), PLUGIN_NAME, this._version, 30000); await this._postProcessDownload(); + + console.info(chalk.green('era-test-node binary downloaded successfully')); } catch (error: any) { throw new ZkSyncNodePluginError(`Error downloading binary from URL ${url}: 
${error.message}`); } diff --git a/packages/hardhat-zksync-node/src/index.ts b/packages/hardhat-zksync-node/src/index.ts index 996fe4451..c84ba70de 100644 --- a/packages/hardhat-zksync-node/src/index.ts +++ b/packages/hardhat-zksync-node/src/index.ts @@ -66,59 +66,75 @@ subtask(TASK_NODE_ZKSYNC_CREATE_SERVER, 'Creates a JSON-RPC server for zkSync no // Main task of the plugin. It starts the server and listens for requests. task(TASK_NODE_ZKSYNC, 'Starts a JSON-RPC server for zkSync node') - .addOptionalParam('log', 'Logging level (error, warn, info, debug)', undefined, types.string) - .addOptionalParam('logFilePath', 'Path to the file where logs should be written', undefined, types.string) - .addOptionalParam('cache', 'Cache network request (none, disk, memory)', undefined, types.string) - .addOptionalParam('cacheDir', 'Path to the directory where cache should be stored', undefined, types.string) - .addFlag('resetCache', 'Reset cache before start') - .addOptionalParam('fork', 'Fork from the specified network (testnet, mainnet)', undefined, types.string) - .addOptionalParam('showSorageLogs', 'Show storage logs (none, read, write, all)', undefined, types.string) - .addOptionalParam('showVmDetails', 'Show VM details (none, all)', undefined, types.string) - .addOptionalParam('showGasDetails', 'Show gas details (none, all)', undefined, types.string) - .addFlag('showCalls', 'Print more detailed call traces') - .addFlag('resolveHashes', 'Ask openchain for ABI names') - .addFlag('force', 'Force download even if the binary already exists') + .addOptionalParam('port', 'Port to listen on - default: 8011', undefined, types.int) + .addOptionalParam('log', 'Log filter level (error, warn, info, debug) - default: info', undefined, types.string) + .addOptionalParam('logFilePath', 'Path to the file where logs should be written - default: `era_test_node.log`', undefined, types.string) + .addOptionalParam('cache', 'Cache type (none, disk, memory) - default: disk', undefined, types.string) + .addOptionalParam('cacheDir', 'Cache directory location for `disk` cache - default: `.cache`', undefined, types.string) + .addFlag('resetCache', 'Reset the local `disk` cache') + .addOptionalParam('showCalls', 'Show call debug information (none, user, system, all) - default: none', undefined, types.string) + .addOptionalParam('showStorageLogs', 'Show storage log information (none, read, write, all) - default: none', undefined, types.string) + .addOptionalParam('showVmDetails', 'Show VM details information (none, all) - default: none', undefined, types.string) + .addOptionalParam('showGasDetails', 'Show Gas details information (none, all) - default: none', undefined, types.string) + .addFlag('resolveHashes', 'Try to contact openchain to resolve the ABI & topic names. 
It enabled, it makes debug log more readable, but will decrease the performance') + .addFlag('devUseLocalContracts', 'Loads the locally compiled system contracts (useful when doing changes to system contracts or bootloader)') + .addOptionalParam('fork', 'Starts a local network that is a fork of another network (testnet, mainnet, http://XXX:YY)', undefined, types.string) + .addOptionalParam('forkBlockNumber', 'Fork at the specified block height', undefined, types.int) + .addOptionalParam('replayTx', 'Transaction hash to replay', undefined, types.string) + // .addFlag('force', 'Force download even if the binary already exists') .setAction( async ( { + port, log, logFilePath, cache, cacheDir, resetCache, - fork, + showCalls, showStorageLogs, showVmDetails, showGasDetails, - showCalls, resolveHashes, + devUseLocalContracts, + fork, + forkBlockNumber, + replayTx, }: { + port: number; log: string; logFilePath: string; cache: string; cacheDir: string; resetCache: boolean; - fork: string; + showCalls: string; showStorageLogs: string; showVmDetails: string; showGasDetails: string; - showCalls: boolean; resolveHashes: boolean; + devUseLocalContracts: boolean; + fork: string; + forkBlockNumber: number; + replayTx: string; }, { run } ) => { const commandArgs = constructCommandArgs({ + port, log, logFilePath, cache, cacheDir, resetCache, - fork, + showCalls, showStorageLogs, showVmDetails, showGasDetails, - showCalls, resolveHashes, + devUseLocalContracts, + fork, + forkBlockNumber, + replayTx, }); // Download the binary diff --git a/packages/hardhat-zksync-node/src/server.ts b/packages/hardhat-zksync-node/src/server.ts index 9c5643b27..d5e9646cb 100644 --- a/packages/hardhat-zksync-node/src/server.ts +++ b/packages/hardhat-zksync-node/src/server.ts @@ -12,8 +12,14 @@ export class JsonRpcServer { public listen(args: string[] = []): void { const command = `${this._serverBinaryPath} ${args.join(' ')}`; + + const portArg = args.find(arg => arg.startsWith('--port=')); + const port = portArg ? 
parseInt(portArg.split('=')[1], 10) : 8011; + try { - console.info(chalk.green(`Starting the JSON-RPC server with command: ${command}`)); + console.info(chalk.green(`Starting the JSON-RPC server at 127.0.0.1:${port}`)); + console.info(chalk.green(`Running command: ${command}`)); + execSync(command, { stdio: 'inherit' }); } catch (error: any) { if (PROCESS_TERMINATION_SIGNALS.includes(error.signal)) { diff --git a/packages/hardhat-zksync-node/src/types.ts b/packages/hardhat-zksync-node/src/types.ts index 6f037ef61..de9014c14 100644 --- a/packages/hardhat-zksync-node/src/types.ts +++ b/packages/hardhat-zksync-node/src/types.ts @@ -1,13 +1,17 @@ export interface CommandArguments { + port?: number; log?: string; logFilePath?: string; cache?: string; cacheDir?: string; resetCache?: boolean; - fork?: string; + showCalls?: string; showStorageLogs?: string; showVmDetails?: string; showGasDetails?: string; - showCalls?: boolean; resolveHashes?: boolean; + devUseLocalContracts?: boolean; + fork?: string; + forkBlockNumber?: number; + replayTx?: string; } diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index 7892430e0..dfe3d1443 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -10,6 +10,7 @@ import { ALLOWED_CACHE_VALUES, ALLOWED_FORK_VALUES, ALLOWED_LOG_VALUES, + ALLOWED_SHOW_CALLS_VALUES, ALLOWED_SHOW_GAS_DETAILS_VALUES, ALLOWED_SHOW_STORAGE_LOGS_VALUES, ALLOWED_SHOW_VM_DETAILS_VALUES, @@ -25,6 +26,10 @@ import { getCompilersDir } from 'hardhat/internal/util/global-dir'; export function constructCommandArgs(args: CommandArguments): string[] { const commandArgs: string[] = []; + if (args.port) { + commandArgs.push(`--port=${args.port}`); + } + if (args.log) { if (!ALLOWED_LOG_VALUES.includes(args.log)) { throw new ZkSyncNodePluginError(`Invalid log value: ${args.log}`); @@ -51,13 +56,6 @@ export function constructCommandArgs(args: CommandArguments): string[] { commandArgs.push(`--reset-cache`); } - if (args.fork) { - if (!ALLOWED_FORK_VALUES.includes(args.fork)) { - throw new ZkSyncNodePluginError(`Invalid fork value: ${args.fork}`); - } - commandArgs.push(`--fork=${args.fork}`); - } - if (args.showStorageLogs) { if (!ALLOWED_SHOW_STORAGE_LOGS_VALUES.includes(args.showStorageLogs)) { throw new ZkSyncNodePluginError(`Invalid showStorageLogs value: ${args.showStorageLogs}`); @@ -80,14 +78,43 @@ export function constructCommandArgs(args: CommandArguments): string[] { } if (args.showCalls) { - commandArgs.push(`--show-calls`); + if (!ALLOWED_SHOW_CALLS_VALUES.includes(args.showCalls)) { + throw new ZkSyncNodePluginError(`Invalid showCalls value: ${args.showCalls}`); + } + commandArgs.push(`--show-calls=${args.showCalls}`); } if (args.resolveHashes) { commandArgs.push(`--resolve-hashes`); } - commandArgs.push('run'); + if (args.devUseLocalContracts) { + commandArgs.push(`--dev-use-local-contracts`); + } + + if (args.fork) { + const urlPattern = /^http(s)?:\/\/[^\s]+$/; + if (!ALLOWED_FORK_VALUES.includes(args.fork) && !urlPattern.test(args.fork)) { + throw new ZkSyncNodePluginError(`Invalid fork network value: ${args.fork}`); + } + + // Throw an error if both forkBlockNumber and replayTx are specified + if (args.forkBlockNumber && args.replayTx) { + throw new ZkSyncNodePluginError( + `Cannot specify both --fork-block-number and --replay-tx. 
Please specify only one of them.` + ); + } + + if (args.forkBlockNumber) { + commandArgs.push(`fork --fork-at ${args.forkBlockNumber} ${args.fork}`); + } else if(args.replayTx) { + commandArgs.push(`replay_tx ${args.fork} ${args.replayTx}`); + } else { + commandArgs.push(`fork ${args.fork}`); + } + } else { + commandArgs.push('run'); + } return commandArgs; } diff --git a/packages/hardhat-zksync-node/test/tests.ts b/packages/hardhat-zksync-node/test/tests.ts index d74c7c1d7..87805a852 100644 --- a/packages/hardhat-zksync-node/test/tests.ts +++ b/packages/hardhat-zksync-node/test/tests.ts @@ -9,43 +9,78 @@ import { spawn, ChildProcess } from "child_process"; import * as utils from '../src/utils'; import { constructCommandArgs, getLatestRelease, getAssetToDownload, download } from '../src/utils'; +import { ZkSyncNodePluginError } from '../src/errors'; import { RPCServerDownloader } from '../src/downloader'; import { TASK_NODE_ZKSYNC, PROCESS_TERMINATION_SIGNALS } from '../src/constants'; describe('node-zksync plugin', async function () { describe('Utils', () => { describe('constructCommandArgs', () => { - it('should correctly construct command arguments', () => { + it('should construct command arguments with minimum args', () => { + const args = {}; + const result = constructCommandArgs(args); + expect(result).to.deep.equal(['run']); + }); + + it('should correctly construct command arguments with all args', () => { const args = { + port: 8012, log: 'error', logFilePath: '/path/to/log', cache: 'disk', cacheDir: '/path/to/cache', resetCache: true, - fork: 'mainnet', showStorageLogs: 'all', showVmDetails: 'none', showGasDetails: 'all', - showCalls: true, - resolveHashes: true + showCalls: 'user', + resolveHashes: true, + devUseLocalContracts: true, + fork: 'mainnet', + forkBlockNumber: 100, }; - + const result = constructCommandArgs(args); expect(result).to.deep.equal([ + '--port=8012', '--log=error', '--log-file-path=/path/to/log', '--cache=disk', '--cache-dir=/path/to/cache', '--reset-cache', - '--fork=mainnet', '--show-storage-logs=all', '--show-vm-details=none', '--show-gas-details=all', - '--show-calls', + '--show-calls=user', '--resolve-hashes', - 'run' + '--dev-use-local-contracts', + 'fork --fork-at 100 mainnet' ]); }); + + it('should throw error when both forkBlockNumber and replayTx are specified in all args', () => { + const args = { fork: 'mainnet', forkBlockNumber: 100, replayTx: '0x1234567890abcdef' }; + expect(() => constructCommandArgs(args)).to.throw("Cannot specify both --fork-block-number and --replay-tx. 
Please specify only one of them."); + }); + + it('should throw error for invalid log value', () => { + const args = { log: 'invalid' }; + expect(() => constructCommandArgs(args)).to.throw("Invalid log value: invalid"); + }); + + it('should correctly construct command arguments with fork and replayTx', () => { + const args = { fork: 'http://example.com', replayTx: '0x1234567890abcdef' }; + const result = constructCommandArgs(args); + expect(result).to.deep.equal(['replay_tx http://example.com 0x1234567890abcdef']); + }); + + it('should throw error for invalid fork URL pattern', () => { + const args = { + fork: 'invalidURL', + }; + + expect(() => constructCommandArgs(args)).to.throw("Invalid fork network value: invalidURL"); + }); }); describe('getAssetToDownload', () => { @@ -300,7 +335,7 @@ describe('node-zksync plugin', async function () { const server = new JsonRpcServer('/path/to/binary'); server.listen(); - sinon.assert.calledWith(consoleInfoStub, chalk.green('Starting the JSON-RPC server with command: /path/to/binary ')); + sinon.assert.calledWith(consoleInfoStub, chalk.green('Starting the JSON-RPC server at 127.0.0.1:8011')); }); it('should handle termination signals gracefully', () => { From 7847c618b4bff69075fd36b9a796bb4869bd8ea1 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Tue, 3 Oct 2023 14:32:26 +0200 Subject: [PATCH 07/17] Add checks for --fork-block-number and --replay-tx parameters --- packages/hardhat-zksync-node/src/utils.ts | 15 ++++++++------- packages/hardhat-zksync-node/test/tests.ts | 6 +++++- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index dfe3d1443..ecacc04c0 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -92,19 +92,20 @@ export function constructCommandArgs(args: CommandArguments): string[] { commandArgs.push(`--dev-use-local-contracts`); } + if (args.forkBlockNumber && args.replayTx) { + throw new ZkSyncNodePluginError(`Cannot specify both --fork-block-number and --replay-tx. Please specify only one of them.`); + } + + if ((args.replayTx || args.forkBlockNumber) && !args.fork) { + throw new ZkSyncNodePluginError(`Cannot specify --replay-tx or --fork-block-number parameters without --fork param.`); + } + if (args.fork) { const urlPattern = /^http(s)?:\/\/[^\s]+$/; if (!ALLOWED_FORK_VALUES.includes(args.fork) && !urlPattern.test(args.fork)) { throw new ZkSyncNodePluginError(`Invalid fork network value: ${args.fork}`); } - // Throw an error if both forkBlockNumber and replayTx are specified - if (args.forkBlockNumber && args.replayTx) { - throw new ZkSyncNodePluginError( - `Cannot specify both --fork-block-number and --replay-tx. 
Please specify only one of them.` - ); - } - if (args.forkBlockNumber) { commandArgs.push(`fork --fork-at ${args.forkBlockNumber} ${args.fork}`); } else if(args.replayTx) { diff --git a/packages/hardhat-zksync-node/test/tests.ts b/packages/hardhat-zksync-node/test/tests.ts index 87805a852..0b55b8c02 100644 --- a/packages/hardhat-zksync-node/test/tests.ts +++ b/packages/hardhat-zksync-node/test/tests.ts @@ -9,7 +9,6 @@ import { spawn, ChildProcess } from "child_process"; import * as utils from '../src/utils'; import { constructCommandArgs, getLatestRelease, getAssetToDownload, download } from '../src/utils'; -import { ZkSyncNodePluginError } from '../src/errors'; import { RPCServerDownloader } from '../src/downloader'; import { TASK_NODE_ZKSYNC, PROCESS_TERMINATION_SIGNALS } from '../src/constants'; @@ -68,6 +67,11 @@ describe('node-zksync plugin', async function () { expect(() => constructCommandArgs(args)).to.throw("Invalid log value: invalid"); }); + it('should throw error when there is no fork arg', () => { + const args = { forkBlockNumber: 100}; + expect(() => constructCommandArgs(args)).to.throw("Cannot specify --replay-tx or --fork-block-number parameters without --fork param."); + }); + it('should correctly construct command arguments with fork and replayTx', () => { const args = { fork: 'http://example.com', replayTx: '0x1234567890abcdef' }; const result = constructCommandArgs(args); From dbd13e7ff65eafb44a75f4401a4f79302bc7170a Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Tue, 3 Oct 2023 14:48:35 +0200 Subject: [PATCH 08/17] Add GH workflow for publishing hardhat-zksync-node plugin --- .github/workflows/publish-node.yaml | 59 +++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 .github/workflows/publish-node.yaml diff --git a/.github/workflows/publish-node.yaml b/.github/workflows/publish-node.yaml new file mode 100644 index 000000000..0917afb08 --- /dev/null +++ b/.github/workflows/publish-node.yaml @@ -0,0 +1,59 @@ +name: Publish Node Plugin + +on: + push: + tags: + - '@matterlabs/hardhat-zksync-node@*' + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-node@v3 + with: + node-version: '16' + registry-url: 'https://registry.npmjs.org' + cache: 'yarn' + + - name: Setup environment + run: yarn && yarn build + + - name: Publish + env: + NODE_AUTH_TOKEN: ${{ secrets.NPMJS_NPM_MATTERLABS_AUTOMATION_TOKEN }} + run: | + if [[ ${{ github.ref }} == *"beta"* ]]; then + echo "Publishing package with beta tag" + npm publish @matterlabs/hardhat-zksync-node --tag beta --workspace=packages/hardhat-zksync-node --access=public + elif [[ ${{ github.ref }} == *"alpha"* ]]; then + echo "Publishing package with alpha tag" + npm publish @matterlabs/hardhat-zksync-node --tag alpha --workspace=packages/hardhat-zksync-node --access=public + else + echo "Publishing package with latest tag" + npm publish @matterlabs/hardhat-zksync-node --tag latest --workspace=packages/hardhat-zksync-node --access=public + fi + + - name: Create github release from tags + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + if [[ ${{ github.ref }} != *"beta"* ]] && [[ ${{ github.ref }} != *"alpha"* ]]; then + node scripts/create-release-from-tags/run.js --package hardhat-zksync-node + else + echo "Skipping github release creation for beta and alpha tags" + fi + + - name: Create the MM Message + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh -R matter-labs/hardhat-zksync release view "$GITHUB_REF_NAME" --json 
tagName,body --template '## {{.tagName}} {{"\n"}}{{.body}}' > ./release_info + jq --null-input --arg text "$(cat ./release_info)" '{"text": $text}' > mattermost.json + + - uses: mattermost/action-mattermost-notify@master + env: + MATTERMOST_USERNAME: "Hardhat Release Bot" + MATTERMOST_WEBHOOK_URL: ${{ secrets.MATTERMOST_URL }} + From 98b8b93989e9ede8aaecb477e457862aed715c4f Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Tue, 3 Oct 2023 15:07:20 +0200 Subject: [PATCH 09/17] Change hardhat-zksync-node package version --- packages/hardhat-zksync-node/README.md | 2 +- packages/hardhat-zksync-node/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/hardhat-zksync-node/README.md b/packages/hardhat-zksync-node/README.md index 4b30a29c2..c5783a7b5 100644 --- a/packages/hardhat-zksync-node/README.md +++ b/packages/hardhat-zksync-node/README.md @@ -1,3 +1,3 @@ # hardhat-zksync-node -[Hardhat](https://hardhat.org/) plugin to run the zkSync node locally. +[Hardhat](https://hardhat.org/) plugin to run the zkSync era-test-node locally. diff --git a/packages/hardhat-zksync-node/package.json b/packages/hardhat-zksync-node/package.json index e03fd832e..db085088a 100644 --- a/packages/hardhat-zksync-node/package.json +++ b/packages/hardhat-zksync-node/package.json @@ -1,6 +1,6 @@ { "name": "@matterlabs/hardhat-zksync-node", - "version": "0.0.1", + "version": "0.0.1-beta.1", "description": "Hardhat plugin to run zkSync era-test-node locally", "repository": "github:matter-labs/hardhat-zksync", "homepage": "https://github.com/matter-labs/hardhat-zksync/tree/main/packages/hardhat-zksync-node", From 9752e4426062006e08baaebcff05eabb8de96b0c Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Tue, 3 Oct 2023 15:27:43 +0200 Subject: [PATCH 10/17] Run prettier --- .../hardhat-zksync-node/src/downloader.ts | 2 +- packages/hardhat-zksync-node/src/index.ts | 59 ++++++++++-- packages/hardhat-zksync-node/src/server.ts | 4 +- packages/hardhat-zksync-node/src/utils.ts | 12 ++- packages/hardhat-zksync-node/test/tests.ts | 96 ++++++++++--------- 5 files changed, 112 insertions(+), 61 deletions(-) diff --git a/packages/hardhat-zksync-node/src/downloader.ts b/packages/hardhat-zksync-node/src/downloader.ts index 1f78ffb39..b6d419018 100644 --- a/packages/hardhat-zksync-node/src/downloader.ts +++ b/packages/hardhat-zksync-node/src/downloader.ts @@ -22,7 +22,7 @@ export class RPCServerDownloader { public async download(url: string): Promise { try { console.info(chalk.yellow(`Downloading era-test-node binary, release: ${this._version}`)); - + await download(url, this.getBinaryPath(), PLUGIN_NAME, this._version, 30000); await this._postProcessDownload(); diff --git a/packages/hardhat-zksync-node/src/index.ts b/packages/hardhat-zksync-node/src/index.ts index c84ba70de..a416285bc 100644 --- a/packages/hardhat-zksync-node/src/index.ts +++ b/packages/hardhat-zksync-node/src/index.ts @@ -68,17 +68,58 @@ subtask(TASK_NODE_ZKSYNC_CREATE_SERVER, 'Creates a JSON-RPC server for zkSync no task(TASK_NODE_ZKSYNC, 'Starts a JSON-RPC server for zkSync node') .addOptionalParam('port', 'Port to listen on - default: 8011', undefined, types.int) .addOptionalParam('log', 'Log filter level (error, warn, info, debug) - default: info', undefined, types.string) - .addOptionalParam('logFilePath', 'Path to the file where logs should be written - default: `era_test_node.log`', undefined, types.string) + .addOptionalParam( + 'logFilePath', + 'Path to the file where logs should be written - default: 
`era_test_node.log`', + undefined, + types.string + ) .addOptionalParam('cache', 'Cache type (none, disk, memory) - default: disk', undefined, types.string) - .addOptionalParam('cacheDir', 'Cache directory location for `disk` cache - default: `.cache`', undefined, types.string) + .addOptionalParam( + 'cacheDir', + 'Cache directory location for `disk` cache - default: `.cache`', + undefined, + types.string + ) .addFlag('resetCache', 'Reset the local `disk` cache') - .addOptionalParam('showCalls', 'Show call debug information (none, user, system, all) - default: none', undefined, types.string) - .addOptionalParam('showStorageLogs', 'Show storage log information (none, read, write, all) - default: none', undefined, types.string) - .addOptionalParam('showVmDetails', 'Show VM details information (none, all) - default: none', undefined, types.string) - .addOptionalParam('showGasDetails', 'Show Gas details information (none, all) - default: none', undefined, types.string) - .addFlag('resolveHashes', 'Try to contact openchain to resolve the ABI & topic names. It enabled, it makes debug log more readable, but will decrease the performance') - .addFlag('devUseLocalContracts', 'Loads the locally compiled system contracts (useful when doing changes to system contracts or bootloader)') - .addOptionalParam('fork', 'Starts a local network that is a fork of another network (testnet, mainnet, http://XXX:YY)', undefined, types.string) + .addOptionalParam( + 'showCalls', + 'Show call debug information (none, user, system, all) - default: none', + undefined, + types.string + ) + .addOptionalParam( + 'showStorageLogs', + 'Show storage log information (none, read, write, all) - default: none', + undefined, + types.string + ) + .addOptionalParam( + 'showVmDetails', + 'Show VM details information (none, all) - default: none', + undefined, + types.string + ) + .addOptionalParam( + 'showGasDetails', + 'Show Gas details information (none, all) - default: none', + undefined, + types.string + ) + .addFlag( + 'resolveHashes', + 'Try to contact openchain to resolve the ABI & topic names. It enabled, it makes debug log more readable, but will decrease the performance' + ) + .addFlag( + 'devUseLocalContracts', + 'Loads the locally compiled system contracts (useful when doing changes to system contracts or bootloader)' + ) + .addOptionalParam( + 'fork', + 'Starts a local network that is a fork of another network (testnet, mainnet, http://XXX:YY)', + undefined, + types.string + ) .addOptionalParam('forkBlockNumber', 'Fork at the specified block height', undefined, types.int) .addOptionalParam('replayTx', 'Transaction hash to replay', undefined, types.string) // .addFlag('force', 'Force download even if the binary already exists') diff --git a/packages/hardhat-zksync-node/src/server.ts b/packages/hardhat-zksync-node/src/server.ts index d5e9646cb..7ddbf9103 100644 --- a/packages/hardhat-zksync-node/src/server.ts +++ b/packages/hardhat-zksync-node/src/server.ts @@ -13,13 +13,13 @@ export class JsonRpcServer { public listen(args: string[] = []): void { const command = `${this._serverBinaryPath} ${args.join(' ')}`; - const portArg = args.find(arg => arg.startsWith('--port=')); + const portArg = args.find((arg) => arg.startsWith('--port=')); const port = portArg ? 
parseInt(portArg.split('=')[1], 10) : 8011; try { console.info(chalk.green(`Starting the JSON-RPC server at 127.0.0.1:${port}`)); console.info(chalk.green(`Running command: ${command}`)); - + execSync(command, { stdio: 'inherit' }); } catch (error: any) { if (PROCESS_TERMINATION_SIGNALS.includes(error.signal)) { diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index ecacc04c0..7fb5fbe9b 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -93,11 +93,15 @@ export function constructCommandArgs(args: CommandArguments): string[] { } if (args.forkBlockNumber && args.replayTx) { - throw new ZkSyncNodePluginError(`Cannot specify both --fork-block-number and --replay-tx. Please specify only one of them.`); + throw new ZkSyncNodePluginError( + `Cannot specify both --fork-block-number and --replay-tx. Please specify only one of them.` + ); } - + if ((args.replayTx || args.forkBlockNumber) && !args.fork) { - throw new ZkSyncNodePluginError(`Cannot specify --replay-tx or --fork-block-number parameters without --fork param.`); + throw new ZkSyncNodePluginError( + `Cannot specify --replay-tx or --fork-block-number parameters without --fork param.` + ); } if (args.fork) { @@ -108,7 +112,7 @@ export function constructCommandArgs(args: CommandArguments): string[] { if (args.forkBlockNumber) { commandArgs.push(`fork --fork-at ${args.forkBlockNumber} ${args.fork}`); - } else if(args.replayTx) { + } else if (args.replayTx) { commandArgs.push(`replay_tx ${args.fork} ${args.replayTx}`); } else { commandArgs.push(`fork ${args.fork}`); diff --git a/packages/hardhat-zksync-node/test/tests.ts b/packages/hardhat-zksync-node/test/tests.ts index 0b55b8c02..045edfacb 100644 --- a/packages/hardhat-zksync-node/test/tests.ts +++ b/packages/hardhat-zksync-node/test/tests.ts @@ -5,7 +5,7 @@ import axios from 'axios'; import fs from 'fs'; import path from 'path'; import proxyquire from 'proxyquire'; -import { spawn, ChildProcess } from "child_process"; +import { spawn, ChildProcess } from 'child_process'; import * as utils from '../src/utils'; import { constructCommandArgs, getLatestRelease, getAssetToDownload, download } from '../src/utils'; @@ -20,7 +20,7 @@ describe('node-zksync plugin', async function () { const result = constructCommandArgs(args); expect(result).to.deep.equal(['run']); }); - + it('should correctly construct command arguments with all args', () => { const args = { port: 8012, @@ -38,7 +38,7 @@ describe('node-zksync plugin', async function () { fork: 'mainnet', forkBlockNumber: 100, }; - + const result = constructCommandArgs(args); expect(result).to.deep.equal([ '--port=8012', @@ -53,23 +53,27 @@ describe('node-zksync plugin', async function () { '--show-calls=user', '--resolve-hashes', '--dev-use-local-contracts', - 'fork --fork-at 100 mainnet' + 'fork --fork-at 100 mainnet', ]); }); it('should throw error when both forkBlockNumber and replayTx are specified in all args', () => { const args = { fork: 'mainnet', forkBlockNumber: 100, replayTx: '0x1234567890abcdef' }; - expect(() => constructCommandArgs(args)).to.throw("Cannot specify both --fork-block-number and --replay-tx. Please specify only one of them."); + expect(() => constructCommandArgs(args)).to.throw( + 'Cannot specify both --fork-block-number and --replay-tx. Please specify only one of them.' 
+ ); }); it('should throw error for invalid log value', () => { const args = { log: 'invalid' }; - expect(() => constructCommandArgs(args)).to.throw("Invalid log value: invalid"); + expect(() => constructCommandArgs(args)).to.throw('Invalid log value: invalid'); }); it('should throw error when there is no fork arg', () => { - const args = { forkBlockNumber: 100}; - expect(() => constructCommandArgs(args)).to.throw("Cannot specify --replay-tx or --fork-block-number parameters without --fork param."); + const args = { forkBlockNumber: 100 }; + expect(() => constructCommandArgs(args)).to.throw( + 'Cannot specify --replay-tx or --fork-block-number parameters without --fork param.' + ); }); it('should correctly construct command arguments with fork and replayTx', () => { @@ -82,8 +86,8 @@ describe('node-zksync plugin', async function () { const args = { fork: 'invalidURL', }; - - expect(() => constructCommandArgs(args)).to.throw("Invalid fork network value: invalidURL"); + + expect(() => constructCommandArgs(args)).to.throw('Invalid fork network value: invalidURL'); }); }); @@ -96,8 +100,8 @@ describe('node-zksync plugin', async function () { assets: [ { name: 'era_test_node-v0.1.0-aarch64-apple-darwin.tar.gz' }, { name: 'era_test_node-v0.1.0-x86_64-apple-darwin.tar.gz' }, - { name: 'era_test_node-v0.1.0-x86_64-unknown-linux-gnu.tar.gz' } - ] + { name: 'era_test_node-v0.1.0-x86_64-unknown-linux-gnu.tar.gz' }, + ], }; beforeEach(() => { @@ -135,7 +139,7 @@ describe('node-zksync plugin', async function () { await getAssetToDownload(mockRelease); throw new Error("Expected an error to be thrown, but it wasn't."); } catch (error: any) { - expect(error.message).to.include("Unsupported platform"); + expect(error.message).to.include('Unsupported platform'); } }); }); @@ -146,18 +150,21 @@ describe('node-zksync plugin', async function () { const mockRelease = { assets: [ { - url: "https://api.github.com/repos/matter-labs/era-test-node/releases/assets/1", - browser_download_url: "https://github.com/matter-labs/era-test-node/releases/download/v0.1.0/era_test_node-v0.1.0-aarch64-apple-darwin.tar.gz", + url: 'https://api.github.com/repos/matter-labs/era-test-node/releases/assets/1', + browser_download_url: + 'https://github.com/matter-labs/era-test-node/releases/download/v0.1.0/era_test_node-v0.1.0-aarch64-apple-darwin.tar.gz', }, { - url: "https://api.github.com/repos/matter-labs/era-test-node/releases/assets/2", - browser_download_url: "https://github.com/matter-labs/era-test-node/releases/download/v0.1.0/era_test_node-v0.1.0-x86_64-apple-darwin.tar.gz", + url: 'https://api.github.com/repos/matter-labs/era-test-node/releases/assets/2', + browser_download_url: + 'https://github.com/matter-labs/era-test-node/releases/download/v0.1.0/era_test_node-v0.1.0-x86_64-apple-darwin.tar.gz', }, { - url: "https://api.github.com/repos/matter-labs/era-test-node/releases/assets/3", - browser_download_url: "https://github.com/matter-labs/era-test-node/releases/download/v0.1.0/era_test_node-v0.1.0-x86_64-unknown-linux-gnu.tar.gz", - } - ] + url: 'https://api.github.com/repos/matter-labs/era-test-node/releases/assets/3', + browser_download_url: + 'https://github.com/matter-labs/era-test-node/releases/download/v0.1.0/era_test_node-v0.1.0-x86_64-unknown-linux-gnu.tar.gz', + }, + ], }; beforeEach(() => { @@ -182,51 +189,51 @@ describe('node-zksync plugin', async function () { response: { status: 404, data: { - message: "Not Found" - } - } + message: 'Not Found', + }, + }, }; 
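                // This stubbed error has the shape axios gives an HTTP error (a `response`
                // carrying a status code). The next three tests exercise the usual axios
                // triage that getLatestRelease in ../src/utils is assumed to perform -
                // roughly the following (a sketch for orientation, not the actual source):
                //
                //     try {
                //         const { data } = await axios.get(
                //             `https://api.github.com/repos/${owner}/${repo}/releases/latest`,
                //             { headers: { 'User-Agent': userAgent } }
                //         );
                //         return data;
                //     } catch (error: any) {
                //         if (error.response) {
                //             // The server replied with a non-2xx status code
                //             throw new ZkSyncNodePluginError(
                //                 `Failed to get latest release: ${error.response.status} ${error.response.data.message}`
                //             );
                //         } else if (error.request) {
                //             // The request was sent but no response ever arrived
                //             throw new ZkSyncNodePluginError('No response received for the latest release request');
                //         } else {
                //             // The request could not even be set up
                //             throw new ZkSyncNodePluginError(`Failed to set up the request: ${error.message}`);
                //         }
                //     }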
axiosGetStub.rejects(errorResponse); try { await getLatestRelease('owner', 'repo', 'userAgent'); - assert.fail("Expected an error to be thrown"); + assert.fail('Expected an error to be thrown'); } catch (error: any) { - expect(error.message).to.include("Failed to get latest release"); - expect(error.message).to.include("404"); - expect(error.message).to.include("Not Found"); + expect(error.message).to.include('Failed to get latest release'); + expect(error.message).to.include('404'); + expect(error.message).to.include('Not Found'); } }); it('should handle errors when no response is received', async () => { const errorNoResponse = { request: {}, - message: "No response" + message: 'No response', }; axiosGetStub.rejects(errorNoResponse); try { await getLatestRelease('owner', 'repo', 'userAgent'); - assert.fail("Expected an error to be thrown"); + assert.fail('Expected an error to be thrown'); } catch (error: any) { - expect(error.message).to.include("No response received"); + expect(error.message).to.include('No response received'); } }); it('should handle errors during request setup', async () => { const errorSetup = { - message: "Setup error" + message: 'Setup error', }; axiosGetStub.rejects(errorSetup); try { await getLatestRelease('owner', 'repo', 'userAgent'); - assert.fail("Expected an error to be thrown"); + assert.fail('Expected an error to be thrown'); } catch (error: any) { - expect(error.message).to.include("Failed to set up the request"); + expect(error.message).to.include('Failed to set up the request'); } }); }); @@ -240,7 +247,9 @@ describe('node-zksync plugin', async function () { beforeEach(() => { downloadStub = sinon.stub(utils, 'download'); existsSyncStub = sinon.stub(fs, 'existsSync'); - postProcessDownloadStub = sinon.stub(RPCServerDownloader.prototype as any, '_postProcessDownload').resolves(); // Stubbing the private method + postProcessDownloadStub = sinon + .stub(RPCServerDownloader.prototype as any, '_postProcessDownload') + .resolves(); // Stubbing the private method }); afterEach(() => { @@ -248,7 +257,6 @@ describe('node-zksync plugin', async function () { }); describe('isDownloaded', () => { - it('should return true if binary exists', async () => { const downloader = new RPCServerDownloader('/path/to/dir', 'version'); existsSyncStub.returns(true); @@ -264,11 +272,9 @@ describe('node-zksync plugin', async function () { const result = await downloader.isDownloaded(); expect(result).to.be.false; }); - }); describe('download', () => { - it('should download the binary if not already downloaded', async () => { const downloader = new RPCServerDownloader('/path/to/dir', 'version'); existsSyncStub.returns(false); @@ -289,7 +295,6 @@ describe('node-zksync plugin', async function () { expect(error.message).to.contain('Error downloading binary from URL'); } }); - }); describe('getBinaryPath', () => { @@ -300,7 +305,6 @@ describe('node-zksync plugin', async function () { expect(result).to.equal('/path/to/dir/version'); }); }); - }); describe('JsonRpcServer', () => { @@ -313,7 +317,7 @@ describe('node-zksync plugin', async function () { // Because we cannot stub the execSync method directly, we use proxyquire to stub the entire 'child_process' module const { JsonRpcServer } = proxyquire('../src/server', { - 'child_process': { execSync: execSyncStub } + child_process: { execSync: execSyncStub }, }); beforeEach(() => { @@ -345,7 +349,7 @@ describe('node-zksync plugin', async function () { it('should handle termination signals gracefully', () => { const server = new 
JsonRpcServer('/path/to/binary'); const error = new Error('Mocked error') as ExecSyncError; - error.signal = PROCESS_TERMINATION_SIGNALS[0]; // Let's simulate the first signal, e.g., 'SIGINT' + error.signal = PROCESS_TERMINATION_SIGNALS[0]; // Let's simulate the first signal, e.g., 'SIGINT' execSyncStub.throws(error); try { @@ -355,7 +359,10 @@ describe('node-zksync plugin', async function () { expect.fail('Did not expect an error to be thrown'); } - sinon.assert.calledWith(consoleInfoStub, chalk.yellow(`Received ${PROCESS_TERMINATION_SIGNALS[0]} signal. The server process has exited.`)); + sinon.assert.calledWith( + consoleInfoStub, + chalk.yellow(`Received ${PROCESS_TERMINATION_SIGNALS[0]} signal. The server process has exited.`) + ); }); it('should throw an error if the server process exits with an error', () => { @@ -379,7 +386,7 @@ describe('node-zksync plugin', async function () { let serverProcess: ChildProcess; function delay(ms: number): Promise { - return new Promise(resolve => setTimeout(resolve, ms)); + return new Promise((resolve) => setTimeout(resolve, ms)); } afterEach(() => { @@ -422,7 +429,6 @@ describe('node-zksync plugin', async function () { // cwd: path.join(__dirname, 'fixture-projects', 'simple'), // }); - // await delay(2000); // try { From 02e7b88b185f125cc5d2c40d64af4a98cba2045b Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Fri, 6 Oct 2023 15:20:43 +0200 Subject: [PATCH 11/17] Overried test task in hardhat-zksync-node plugin --- packages/hardhat-zksync-node/src/constants.ts | 1 + packages/hardhat-zksync-node/src/index.ts | 80 +++++++++++++++++++ .../src/type-extensions.ts | 25 ++++++ 3 files changed, 106 insertions(+) create mode 100644 packages/hardhat-zksync-node/src/type-extensions.ts diff --git a/packages/hardhat-zksync-node/src/constants.ts b/packages/hardhat-zksync-node/src/constants.ts index 10283f139..6125eedbc 100644 --- a/packages/hardhat-zksync-node/src/constants.ts +++ b/packages/hardhat-zksync-node/src/constants.ts @@ -6,6 +6,7 @@ export const ZKNODE_BIN_REPOSITORY_NAME = 'era-test-node'; export const TASK_NODE_ZKSYNC = 'node-zksync'; export const TASK_NODE_ZKSYNC_CREATE_SERVER = 'node-zksync:create-server'; export const TASK_NODE_ZKSYNC_DOWNLOAD_BINARY = 'node-zksync:download-binary'; +export const TASK_RUN_TASK_IN_SEPARATE_PROCESS = 'run-task-in-separate-process'; export const PROCESS_TERMINATION_SIGNALS = ['SIGINT', 'SIGTERM']; diff --git a/packages/hardhat-zksync-node/src/index.ts b/packages/hardhat-zksync-node/src/index.ts index a416285bc..cd57145c3 100644 --- a/packages/hardhat-zksync-node/src/index.ts +++ b/packages/hardhat-zksync-node/src/index.ts @@ -1,10 +1,15 @@ +import { spawn } from 'child_process'; import { task, subtask, types } from 'hardhat/config'; +import { TASK_COMPILE, TASK_TEST, TASK_TEST_GET_TEST_FILES, TASK_TEST_RUN_MOCHA_TESTS, TASK_TEST_RUN_SHOW_FORK_RECOMMENDATIONS, TASK_TEST_SETUP_TEST_ENVIRONMENT } from 'hardhat/builtin-tasks/task-names'; +import { Provider } from 'zksync-web3'; import { PLUGIN_NAME, + PROCESS_TERMINATION_SIGNALS, TASK_NODE_ZKSYNC, TASK_NODE_ZKSYNC_CREATE_SERVER, TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, + TASK_RUN_TASK_IN_SEPARATE_PROCESS, ZKNODE_BIN_OWNER, ZKNODE_BIN_REPOSITORY_NAME, } from './constants'; @@ -191,3 +196,78 @@ task(TASK_NODE_ZKSYNC, 'Starts a JSON-RPC server for zkSync node') } } ); + +subtask(TASK_RUN_TASK_IN_SEPARATE_PROCESS, "Runs a Hardhat task in a separate process.") + .addParam("taskName", "The name of the Hardhat task to run.", undefined, types.string) + 
.addVariadicPositionalParam("taskArgs", "Arguments for the Hardhat task.") + .setAction(async ({ taskName, taskArgs = [] }, hre) => { + const taskProcess = spawn('npx', ['hardhat', taskName, ...taskArgs], { + detached: true, // This creates a separate process group + }); + + return taskProcess; + }); + +task(TASK_TEST, async ( + { + testFiles, + noCompile, + parallel, + bail, + grep, + }: { + testFiles: string[]; + noCompile: boolean; + parallel: boolean; + bail: boolean; + grep?: string; + }, + { run, network }, + runSuper +) => { + if (network.zksync !== true) { + return await runSuper(); + } + + if (!noCompile) { + await run(TASK_COMPILE, { quiet: true }); + } + + const files = await run(TASK_TEST_GET_TEST_FILES, { testFiles }); + + // Start the zkSync node using TASK_RUN_TASK_IN_SEPARATE_PROCESS + const taskArgs: any[] = [/* Add necessary arguments here */]; + const taskProcess = await run(TASK_RUN_TASK_IN_SEPARATE_PROCESS, { + taskName: TASK_NODE_ZKSYNC, + taskArgs: taskArgs + }); + + // Give the node some time to start + await new Promise(resolve => setTimeout(resolve, 5000)); + + // Modify the hre object to point to the running zkSync node + network.name = "zkSyncEraTestNode"; // or the appropriate network name + + let testFailures = 0; + try { + // Run the tests + testFailures = await run(TASK_TEST_RUN_MOCHA_TESTS, { + testFiles: files, + parallel, + bail, + grep, + }); + } finally { + // Ensure we shut down the zkSync node after tests are done + if (taskProcess) { + try { + process.kill(-taskProcess.pid!); // Notice the '-' before the pid + } catch (e) { + // Handle potential errors + } + } + } + + process.exitCode = testFailures; + return testFailures; +}); \ No newline at end of file diff --git a/packages/hardhat-zksync-node/src/type-extensions.ts b/packages/hardhat-zksync-node/src/type-extensions.ts new file mode 100644 index 000000000..b3736776f --- /dev/null +++ b/packages/hardhat-zksync-node/src/type-extensions.ts @@ -0,0 +1,25 @@ +import 'hardhat/types/config'; + +declare module 'hardhat/types/config' { + interface HardhatNetworkUserConfig { + zksync?: boolean; + } + + interface HttpNetworkUserConfig { + zksync?: boolean; + } + + interface HardhatNetworkConfig { + zksync: boolean; + } + + interface HttpNetworkConfig { + zksync: boolean; + } +} + +declare module 'hardhat/types/runtime' { + interface Network { + zksync: boolean; + } +} From 81bae13055d471139ac072bafb39a9e8802ea867 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Thu, 12 Oct 2023 11:41:13 +0200 Subject: [PATCH 12/17] Override tast test in hardhat-zkysnc-node plugin --- packages/hardhat-zksync-node/package.json | 2 + packages/hardhat-zksync-node/src/constants.ts | 22 +++- packages/hardhat-zksync-node/src/index.ts | 59 +++++---- .../src/type-extensions.ts | 2 + packages/hardhat-zksync-node/src/utils.ts | 103 +++++++++++++++- .../src/zksync-provider-adapter.ts | 28 +++++ packages/hardhat-zksync-node/test/tests.ts | 113 +++++++++++++++++- yarn.lock | 25 +++- 8 files changed, 327 insertions(+), 27 deletions(-) create mode 100644 packages/hardhat-zksync-node/src/zksync-provider-adapter.ts diff --git a/packages/hardhat-zksync-node/package.json b/packages/hardhat-zksync-node/package.json index db085088a..300f8bdd8 100644 --- a/packages/hardhat-zksync-node/package.json +++ b/packages/hardhat-zksync-node/package.json @@ -44,6 +44,7 @@ "@types/mocha": "^9.1.0", "@types/node": "^18.11.17", "@types/proxyquire": "^1.3.29", + "@types/sinon-chai": "^3.2.10", "@typescript-eslint/eslint-plugin": "5.13.0", 
"@typescript-eslint/parser": "5.13.0", "chai": "^4.3.6", @@ -58,6 +59,7 @@ "proxyquire": "^2.1.3", "rimraf": "^3.0.2", "sinon": "^16.0.0", + "sinon-chai": "^3.7.0", "ts-node": "^10.6.0", "typescript": "^4.6.2", "zksync-web3": "^0.14.3" diff --git a/packages/hardhat-zksync-node/src/constants.ts b/packages/hardhat-zksync-node/src/constants.ts index 6125eedbc..256d90483 100644 --- a/packages/hardhat-zksync-node/src/constants.ts +++ b/packages/hardhat-zksync-node/src/constants.ts @@ -6,7 +6,7 @@ export const ZKNODE_BIN_REPOSITORY_NAME = 'era-test-node'; export const TASK_NODE_ZKSYNC = 'node-zksync'; export const TASK_NODE_ZKSYNC_CREATE_SERVER = 'node-zksync:create-server'; export const TASK_NODE_ZKSYNC_DOWNLOAD_BINARY = 'node-zksync:download-binary'; -export const TASK_RUN_TASK_IN_SEPARATE_PROCESS = 'run-task-in-separate-process'; +export const TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS = 'node-zksync:run-in-separate-process'; export const PROCESS_TERMINATION_SIGNALS = ['SIGINT', 'SIGTERM']; @@ -26,6 +26,26 @@ export const PLATFORM_MAP: Record = { export const TEMP_FILE_PREFIX = 'tmp-'; +export const START_PORT = 8011; +export const MAX_PORT_ATTEMPTS = 10; +export const PORT_CHECK_DELAY = 500; +export const RPC_ENDPOINT_PATH = "eth_chainId"; + +export const ZKSYNC_ERA_TEST_NODE_NETWORK_NAME = 'zkSyncEraTestNode'; +export const BASE_URL = `http://localhost`; +export const NETWORK_ACCOUNTS = { + REMOTE: "remote" +}; +export const NETWORK_GAS = { + AUTO: "auto" +}; +export const NETWORK_GAS_PRICE = { + AUTO: "auto" +}; +export const NETWORK_ETH = { + LOCALHOST: "localhost" +}; + // export const TOOLCHAIN_MAP: Record = { // linux: '-musl', // win32: '-gnu', diff --git a/packages/hardhat-zksync-node/src/index.ts b/packages/hardhat-zksync-node/src/index.ts index cd57145c3..dbc3cb23e 100644 --- a/packages/hardhat-zksync-node/src/index.ts +++ b/packages/hardhat-zksync-node/src/index.ts @@ -4,19 +4,23 @@ import { TASK_COMPILE, TASK_TEST, TASK_TEST_GET_TEST_FILES, TASK_TEST_RUN_MOCHA_ import { Provider } from 'zksync-web3'; import { + MAX_PORT_ATTEMPTS, PLUGIN_NAME, - PROCESS_TERMINATION_SIGNALS, + START_PORT, TASK_NODE_ZKSYNC, TASK_NODE_ZKSYNC_CREATE_SERVER, TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, - TASK_RUN_TASK_IN_SEPARATE_PROCESS, + TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS, ZKNODE_BIN_OWNER, ZKNODE_BIN_REPOSITORY_NAME, } from './constants'; import { JsonRpcServer } from './server'; -import { constructCommandArgs, getAssetToDownload, getLatestRelease, getRPCServerBinariesDir } from './utils'; +import { adjustTaskArgsForPort, configureNetwork, constructCommandArgs, getAssetToDownload, getAvailablePort, getLatestRelease, getPlatform, getRPCServerBinariesDir, isPortAvailable, waitForNodeToBeReady } from './utils'; import { RPCServerDownloader } from './downloader'; import { ZkSyncNodePluginError } from './errors'; +import { ZkSyncProviderAdapter } from './zksync-provider-adapter'; +import chalk from 'chalk'; +import { HARDHAT_NETWORK_NAME } from 'hardhat/plugins'; // Subtask to download the binary subtask(TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, 'Downloads the JSON-RPC server binary') @@ -197,15 +201,21 @@ task(TASK_NODE_ZKSYNC, 'Starts a JSON-RPC server for zkSync node') } ); -subtask(TASK_RUN_TASK_IN_SEPARATE_PROCESS, "Runs a Hardhat task in a separate process.") - .addParam("taskName", "The name of the Hardhat task to run.", undefined, types.string) - .addVariadicPositionalParam("taskArgs", "Arguments for the Hardhat task.") - .setAction(async ({ taskName, taskArgs = [] }, hre) => { - const taskProcess = 
spawn('npx', ['hardhat', taskName, ...taskArgs], { +subtask(TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS, "Runs a Hardhat node-zksync task in a separate process.") + .addVariadicPositionalParam("taskArgs", "Arguments for the Hardhat node-zksync task.") + .setAction(async ({ taskArgs = [] }, hre) => { + const currentPort = await getAvailablePort(START_PORT, MAX_PORT_ATTEMPTS); + const adjustedArgs = adjustTaskArgsForPort(taskArgs, currentPort); + + const taskProcess = spawn('npx', ['hardhat', TASK_NODE_ZKSYNC, ...adjustedArgs], { detached: true, // This creates a separate process group + // stdio: 'inherit', }); - return taskProcess; + return { + process: taskProcess, + port: currentPort + }; }); task(TASK_TEST, async ( @@ -225,28 +235,29 @@ task(TASK_TEST, async ( { run, network }, runSuper ) => { - if (network.zksync !== true) { + if (network.zksync !== true || network.name !== HARDHAT_NETWORK_NAME) { return await runSuper(); } + const platform = getPlatform(); + if (platform === 'windows' || platform === '') { + throw new ZkSyncNodePluginError(`Unsupported platform: ${platform}`); + } + if (!noCompile) { await run(TASK_COMPILE, { quiet: true }); } const files = await run(TASK_TEST_GET_TEST_FILES, { testFiles }); - // Start the zkSync node using TASK_RUN_TASK_IN_SEPARATE_PROCESS + // Start the zkSync node using TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS const taskArgs: any[] = [/* Add necessary arguments here */]; - const taskProcess = await run(TASK_RUN_TASK_IN_SEPARATE_PROCESS, { - taskName: TASK_NODE_ZKSYNC, + const { process: taskProcess, port } = await run(TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS, { taskArgs: taskArgs }); - // Give the node some time to start - await new Promise(resolve => setTimeout(resolve, 5000)); - - // Modify the hre object to point to the running zkSync node - network.name = "zkSyncEraTestNode"; // or the appropriate network name + await waitForNodeToBeReady(port) + configureNetwork(network, port); let testFailures = 0; try { @@ -261,13 +272,17 @@ task(TASK_TEST, async ( // Ensure we shut down the zkSync node after tests are done if (taskProcess) { try { - process.kill(-taskProcess.pid!); // Notice the '-' before the pid - } catch (e) { - // Handle potential errors + process.kill(-taskProcess.pid!); + } catch (error: any) { + if (error.code !== 'ESRCH') { // ESRCH means the process was already terminated + console.info(chalk.red(`Failed to kill the zkSync node process when running tests: ${error.message}`)); + } } } } process.exitCode = testFailures; return testFailures; -}); \ No newline at end of file +}); + +export { ZkSyncProviderAdapter } from './zksync-provider-adapter'; diff --git a/packages/hardhat-zksync-node/src/type-extensions.ts b/packages/hardhat-zksync-node/src/type-extensions.ts index b3736776f..98cb732c9 100644 --- a/packages/hardhat-zksync-node/src/type-extensions.ts +++ b/packages/hardhat-zksync-node/src/type-extensions.ts @@ -3,6 +3,7 @@ import 'hardhat/types/config'; declare module 'hardhat/types/config' { interface HardhatNetworkUserConfig { zksync?: boolean; + ethNetwork?: string; } interface HttpNetworkUserConfig { @@ -15,6 +16,7 @@ declare module 'hardhat/types/config' { interface HttpNetworkConfig { zksync: boolean; + ethNetwork?: string; } } diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index 7fb5fbe9b..c62f58ac9 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -2,9 +2,12 @@ import path from 'path'; import axios from 'axios'; import 
util from 'util'; import fs from 'fs'; +import net from 'net'; import fse from 'fs-extra'; import { exec } from 'child_process'; import type { Dispatcher } from 'undici'; +import { ZkSyncProviderAdapter } from './zksync-provider-adapter'; +import { Provider } from 'zksync-web3'; import { ALLOWED_CACHE_VALUES, @@ -14,8 +17,14 @@ import { ALLOWED_SHOW_GAS_DETAILS_VALUES, ALLOWED_SHOW_STORAGE_LOGS_VALUES, ALLOWED_SHOW_VM_DETAILS_VALUES, + BASE_URL, + NETWORK_ACCOUNTS, + NETWORK_ETH, + NETWORK_GAS, + NETWORK_GAS_PRICE, PLATFORM_MAP, TEMP_FILE_PREFIX, + ZKSYNC_ERA_TEST_NODE_NETWORK_NAME, } from './constants'; import { ZkSyncNodePluginError } from './errors'; import { CommandArguments } from './types'; @@ -124,7 +133,7 @@ export function constructCommandArgs(args: CommandArguments): string[] { return commandArgs; } -function getPlatform() { +export function getPlatform() { return PLATFORM_MAP[process.platform] || ''; } @@ -290,3 +299,95 @@ export async function download( // eslint-disable-next-line throw new Error(`Failed to download ${url} - ${response.statusCode} received. ${text}`); } + +async function isPortAvailableForIP(port: number, ip: string): Promise { + return new Promise((resolve) => { + const tester: net.Server = net.createServer() + .once('error', (err: any) => resolve(err.code !== 'EADDRINUSE')) + .once('listening', () => tester.close(() => resolve(true))) + .listen(port, ip); + }); +} + +export async function isPortAvailable(port: number): Promise { + const availableIPv4 = await isPortAvailableForIP(port, '0.0.0.0'); + const availableIPv6 = await isPortAvailableForIP(port, '::'); + return availableIPv4 && availableIPv6; +} + +export async function waitForNodeToBeReady(port: number, maxAttempts: number = 10): Promise { + const rpcEndpoint = `http://localhost:${port}`; + + const payload = { + jsonrpc: "2.0", + method: "eth_chainId", + params: [], + id: new Date().getTime() // Unique ID for the request + }; + + let attempts = 0; + while (attempts < maxAttempts) { + try { + const response = await axios.post(rpcEndpoint, payload); + + if (response.data && response.data.result) { + return; // The node responded with a valid chain ID + } + } catch (e) { + // If it fails, it will just try again + } + + attempts++; + await new Promise(r => setTimeout(r, 500)); // Wait for 500ms before the next attempt. 
+ } + + throw new ZkSyncNodePluginError("Server didn't respond after multiple attempts"); +} + +export async function getAvailablePort(startPort: number, maxAttempts: number): Promise { + let currentPort = startPort; + for (let i = 0; i < maxAttempts; i++) { + if (await isPortAvailable(currentPort)) { + return currentPort; + } + currentPort++; + } + throw new ZkSyncNodePluginError("Couldn't find an available port after several attempts"); +} + +export function adjustTaskArgsForPort(taskArgs: string[], currentPort: number): string[] { + const portArg = '--port'; + const portArgIndex = taskArgs.indexOf(portArg); + if (portArgIndex !== -1) { + if (portArgIndex + 1 < taskArgs.length) { + taskArgs[portArgIndex + 1] = `${currentPort}`; + } else { + throw new ZkSyncNodePluginError("Invalid task arguments: --port provided without a following port number."); + } + } else { + taskArgs.push(portArg, `${currentPort}`); + } + return taskArgs; +} + +function getNetworkConfig(url: string) { + return { + accounts: NETWORK_ACCOUNTS.REMOTE, + gas: NETWORK_GAS.AUTO, + gasPrice: NETWORK_GAS_PRICE.AUTO, + gasMultiplier: 1, + httpHeaders: {}, + timeout: 20000, + url: url, + ethNetwork: NETWORK_ETH.LOCALHOST, + zksync: true, + }; +} + +export function configureNetwork(network: any, port: number) { + const url = `${BASE_URL}:${port}`; + + network.name = ZKSYNC_ERA_TEST_NODE_NETWORK_NAME; + network.config = getNetworkConfig(url); + network.provider = new ZkSyncProviderAdapter(new Provider(url)); +} \ No newline at end of file diff --git a/packages/hardhat-zksync-node/src/zksync-provider-adapter.ts b/packages/hardhat-zksync-node/src/zksync-provider-adapter.ts new file mode 100644 index 000000000..a6b19d311 --- /dev/null +++ b/packages/hardhat-zksync-node/src/zksync-provider-adapter.ts @@ -0,0 +1,28 @@ +import { EthereumProvider } from 'hardhat/types'; +import { EventEmitter } from "events"; +import { Provider } from 'zksync-web3'; + +export class ZkSyncProviderAdapter extends EventEmitter implements EthereumProvider { + + constructor(public readonly _zkSyncProvider: Provider) { + super(); + } + + async send(method: string, params: any): Promise { + return await this._zkSyncProvider.send(method, params); + } + + async request(payload: { method: string, params?: any[] }): Promise { + return await this._zkSyncProvider.send(payload.method, payload.params ?? 
[]); + } + + async sendAsync(payload: any, callback: (error: Error | null, result?: any) => void): Promise { + try { + const result = await this._zkSyncProvider.send(payload.method, payload.params); + callback(null, result); + } catch (error) { + callback(error as Error); + } + } +} + diff --git a/packages/hardhat-zksync-node/test/tests.ts b/packages/hardhat-zksync-node/test/tests.ts index 045edfacb..2c8d77b5f 100644 --- a/packages/hardhat-zksync-node/test/tests.ts +++ b/packages/hardhat-zksync-node/test/tests.ts @@ -1,16 +1,35 @@ import { expect, assert } from 'chai'; +import chai from 'chai'; +import sinonChai from 'sinon-chai'; import chalk from 'chalk'; import sinon from 'sinon'; import axios from 'axios'; import fs from 'fs'; import path from 'path'; +import net from 'net'; import proxyquire from 'proxyquire'; import { spawn, ChildProcess } from 'child_process'; import * as utils from '../src/utils'; import { constructCommandArgs, getLatestRelease, getAssetToDownload, download } from '../src/utils'; import { RPCServerDownloader } from '../src/downloader'; -import { TASK_NODE_ZKSYNC, PROCESS_TERMINATION_SIGNALS } from '../src/constants'; +import { TASK_NODE_ZKSYNC, PROCESS_TERMINATION_SIGNALS, ZKSYNC_ERA_TEST_NODE_NETWORK_NAME, MAX_PORT_ATTEMPTS } from '../src/constants'; +import { Network } from 'hardhat/types'; + +chai.use(sinonChai); + +async function getPort(): Promise { + return new Promise((resolve, reject) => { + const server = net.createServer(); + server.listen(0, () => { + const port = (server.address() as net.AddressInfo).port; + server.close(() => { + resolve(port); + }); + }); + server.on('error', reject); + }); +} describe('node-zksync plugin', async function () { describe('Utils', () => { @@ -237,6 +256,96 @@ describe('node-zksync plugin', async function () { } }); }); + + describe('waitForNodeToBeReady', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return successfully if the node is ready', async () => { + // Mock the axios.post to simulate a node being ready + sinon.stub(axios, 'post').resolves({ data: { result: true } }); + + const port = 12345; // any port for testing purposes + await utils.waitForNodeToBeReady(port); + + expect(axios.post).to.have.been.calledWith(`http://localhost:${port}`); + }); + + it('should throw an error if the node isn\'t ready after maxAttempts', async () => { + // Make the stub reject all the time to simulate the node never being ready + sinon.stub(axios, 'post').rejects(new Error('Node not ready')); + + try { + await utils.waitForNodeToBeReady(8080, 10); + throw new Error('Expected waitForNodeToBeReady to throw but it did not'); + } catch (err: any) { + expect(err.message).to.equal('Server didn\'t respond after multiple attempts'); + } + }); + }); + + describe('adjustTaskArgsForPort', () => { + it('should correctly add the --port argument if it\'s not present', () => { + const result = utils.adjustTaskArgsForPort([], 8000); + expect(result).to.deep.equal(['--port', '8000']); + }); + + it('should correctly update the --port argument if it\'s present', () => { + const result = utils.adjustTaskArgsForPort(['--port', '9000'], 8000); + expect(result).to.deep.equal(['--port', '8000']); + }); + }); + + describe('isPortAvailable', () => { + let server: net.Server; + + afterEach(() => { + if (server) server.close(); + }); + + it('should correctly identify an available port', async () => { + const port = await getPort(); + const result = await utils.isPortAvailable(port); + expect(result).to.be.true; + }); + + it('should 
correctly identify a port that is in use', async () => { + const port = await getPort(); + server = net.createServer().listen(port); + const result = await utils.isPortAvailable(port); + expect(result).to.be.false; + }); + }); + + describe('getAvailablePort', () => { + let isPortAvailableStub: sinon.SinonStub<[number], Promise>; + + beforeEach(() => { + isPortAvailableStub = sinon.stub(utils, 'isPortAvailable'); + }); + + afterEach(() => { + isPortAvailableStub.restore(); + }); + + it('should return the first available port', async () => { + isPortAvailableStub.returns(Promise.resolve(true)); + const port = await utils.getAvailablePort(8080, 10); + expect(port).to.equal(8080); + }); + + // it('should throw an error after checking the maxAttempts number of ports', async () => { + // isPortAvailableStub.returns(Promise.resolve(false)); + + // try { + // await utils.getAvailablePort(8000, 10); + // throw new Error('Expected getAvailablePort to throw but it did not'); + // } catch (err: any) { + // expect(err.message).to.equal('Couldn\'t find an available port after several attempts'); + // } + // }); + }); }); describe('RPCServerDownloader', () => { @@ -380,7 +489,7 @@ describe('node-zksync plugin', async function () { }); }); - describe('Testing task', function () { + describe('TASK_NODE_ZKSYNC', function () { this.timeout(10000); // Increase timeout if needed let serverProcess: ChildProcess; diff --git a/yarn.lock b/yarn.lock index 02e96f1d5..2070fcb39 100644 --- a/yarn.lock +++ b/yarn.lock @@ -708,6 +708,9 @@ globby "^11.0.0" read-yaml-file "^1.1.0" +"@matterlabs/hardhat-zksync-chai-matchers@link:packages/hardhat-zksync-chai-matchers": + version "0.1.4" + "@matterlabs/hardhat-zksync-deploy@link:packages/hardhat-zksync-deploy": version "0.6.5" dependencies: @@ -716,7 +719,7 @@ ts-morph "^19.0.0" "@matterlabs/hardhat-zksync-node@link:packages/hardhat-zksync-node": - version "0.0.1" + version "0.0.1-beta.1" dependencies: "@matterlabs/hardhat-zksync-solc" "0.4.2" axios "^1.4.0" @@ -1529,6 +1532,21 @@ resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.12.tgz#920447fdd78d76b19de0438b7f60df3c4a80bf1c" integrity sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A== +"@types/sinon-chai@^3.2.10": + version "3.2.10" + resolved "https://registry.yarnpkg.com/@types/sinon-chai/-/sinon-chai-3.2.10.tgz#fb876d4766847b63107b379844f82a8819153b05" + integrity sha512-D+VFqUjMqeku/FGl4Ioo+fDeWOaIfbZ6Oj+glgFUgz5m5RJ4kgCER3FdV1uvhmEt0A+FRz+juPdybFlg5Hxfow== + dependencies: + "@types/chai" "*" + "@types/sinon" "*" + +"@types/sinon@*": + version "10.0.19" + resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.19.tgz#752b752bc40bb5af0bb1aec29bde49b139b91d35" + integrity sha512-MWZNGPSchIdDfb5FL+VFi4zHsHbNOTQEgjqFQk7HazXSXwUU9PAX3z9XBqb3AJGYr9YwrtCtaSMsT3brYsN/jQ== + dependencies: + "@types/sinonjs__fake-timers" "*" + "@types/sinon@^10.0.13": version "10.0.13" resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.13.tgz#60a7a87a70d9372d0b7b38cc03e825f46981fb83" @@ -5614,6 +5632,11 @@ signal-exit@^3.0.0, signal-exit@^3.0.2: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== +sinon-chai@^3.7.0: + version "3.7.0" + resolved "https://registry.yarnpkg.com/sinon-chai/-/sinon-chai-3.7.0.tgz#cfb7dec1c50990ed18c153f1840721cf13139783" + integrity 
sha512-mf5NURdUaSdnatJx3uhoBOrY9dtL19fiOtAdT1Azxg3+lNJFiuN0uzaU3xX1LeAfL17kHQhTAJgpsfhbMJMY2g== + sinon@^15.0.1: version "15.0.1" resolved "https://registry.yarnpkg.com/sinon/-/sinon-15.0.1.tgz#ce062611a0b131892e2c18f03055b8eb6e8dc234" From 65d5ba0346faf96190a00bc961e296350236d875 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Thu, 12 Oct 2023 11:42:34 +0200 Subject: [PATCH 13/17] Run prettier --- packages/hardhat-zksync-node/src/constants.ts | 10 +- packages/hardhat-zksync-node/src/index.ts | 148 ++++++++++-------- packages/hardhat-zksync-node/src/utils.ts | 19 +-- .../src/zksync-provider-adapter.ts | 6 +- packages/hardhat-zksync-node/test/tests.ts | 21 ++- 5 files changed, 117 insertions(+), 87 deletions(-) diff --git a/packages/hardhat-zksync-node/src/constants.ts b/packages/hardhat-zksync-node/src/constants.ts index 256d90483..df346153c 100644 --- a/packages/hardhat-zksync-node/src/constants.ts +++ b/packages/hardhat-zksync-node/src/constants.ts @@ -29,21 +29,21 @@ export const TEMP_FILE_PREFIX = 'tmp-'; export const START_PORT = 8011; export const MAX_PORT_ATTEMPTS = 10; export const PORT_CHECK_DELAY = 500; -export const RPC_ENDPOINT_PATH = "eth_chainId"; +export const RPC_ENDPOINT_PATH = 'eth_chainId'; export const ZKSYNC_ERA_TEST_NODE_NETWORK_NAME = 'zkSyncEraTestNode'; export const BASE_URL = `http://localhost`; export const NETWORK_ACCOUNTS = { - REMOTE: "remote" + REMOTE: 'remote', }; export const NETWORK_GAS = { - AUTO: "auto" + AUTO: 'auto', }; export const NETWORK_GAS_PRICE = { - AUTO: "auto" + AUTO: 'auto', }; export const NETWORK_ETH = { - LOCALHOST: "localhost" + LOCALHOST: 'localhost', }; // export const TOOLCHAIN_MAP: Record = { diff --git a/packages/hardhat-zksync-node/src/index.ts b/packages/hardhat-zksync-node/src/index.ts index dbc3cb23e..1fa2aaddb 100644 --- a/packages/hardhat-zksync-node/src/index.ts +++ b/packages/hardhat-zksync-node/src/index.ts @@ -1,6 +1,13 @@ import { spawn } from 'child_process'; import { task, subtask, types } from 'hardhat/config'; -import { TASK_COMPILE, TASK_TEST, TASK_TEST_GET_TEST_FILES, TASK_TEST_RUN_MOCHA_TESTS, TASK_TEST_RUN_SHOW_FORK_RECOMMENDATIONS, TASK_TEST_SETUP_TEST_ENVIRONMENT } from 'hardhat/builtin-tasks/task-names'; +import { + TASK_COMPILE, + TASK_TEST, + TASK_TEST_GET_TEST_FILES, + TASK_TEST_RUN_MOCHA_TESTS, + TASK_TEST_RUN_SHOW_FORK_RECOMMENDATIONS, + TASK_TEST_SETUP_TEST_ENVIRONMENT, +} from 'hardhat/builtin-tasks/task-names'; import { Provider } from 'zksync-web3'; import { @@ -15,7 +22,18 @@ import { ZKNODE_BIN_REPOSITORY_NAME, } from './constants'; import { JsonRpcServer } from './server'; -import { adjustTaskArgsForPort, configureNetwork, constructCommandArgs, getAssetToDownload, getAvailablePort, getLatestRelease, getPlatform, getRPCServerBinariesDir, isPortAvailable, waitForNodeToBeReady } from './utils'; +import { + adjustTaskArgsForPort, + configureNetwork, + constructCommandArgs, + getAssetToDownload, + getAvailablePort, + getLatestRelease, + getPlatform, + getRPCServerBinariesDir, + isPortAvailable, + waitForNodeToBeReady, +} from './utils'; import { RPCServerDownloader } from './downloader'; import { ZkSyncNodePluginError } from './errors'; import { ZkSyncProviderAdapter } from './zksync-provider-adapter'; @@ -201,8 +219,8 @@ task(TASK_NODE_ZKSYNC, 'Starts a JSON-RPC server for zkSync node') } ); -subtask(TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS, "Runs a Hardhat node-zksync task in a separate process.") - .addVariadicPositionalParam("taskArgs", "Arguments for the Hardhat node-zksync task.") 
+subtask(TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS, 'Runs a Hardhat node-zksync task in a separate process.') + .addVariadicPositionalParam('taskArgs', 'Arguments for the Hardhat node-zksync task.') .setAction(async ({ taskArgs = [] }, hre) => { const currentPort = await getAvailablePort(START_PORT, MAX_PORT_ATTEMPTS); const adjustedArgs = adjustTaskArgsForPort(taskArgs, currentPort); @@ -214,75 +232,83 @@ subtask(TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS, "Runs a Hardhat node-zksync ta return { process: taskProcess, - port: currentPort + port: currentPort, }; }); -task(TASK_TEST, async ( - { - testFiles, - noCompile, - parallel, - bail, - grep, - }: { - testFiles: string[]; - noCompile: boolean; - parallel: boolean; - bail: boolean; - grep?: string; - }, - { run, network }, - runSuper -) => { - if (network.zksync !== true || network.name !== HARDHAT_NETWORK_NAME) { - return await runSuper(); - } +task( + TASK_TEST, + async ( + { + testFiles, + noCompile, + parallel, + bail, + grep, + }: { + testFiles: string[]; + noCompile: boolean; + parallel: boolean; + bail: boolean; + grep?: string; + }, + { run, network }, + runSuper + ) => { + if (network.zksync !== true || network.name !== HARDHAT_NETWORK_NAME) { + return await runSuper(); + } - const platform = getPlatform(); - if (platform === 'windows' || platform === '') { - throw new ZkSyncNodePluginError(`Unsupported platform: ${platform}`); - } + const platform = getPlatform(); + if (platform === 'windows' || platform === '') { + throw new ZkSyncNodePluginError(`Unsupported platform: ${platform}`); + } - if (!noCompile) { - await run(TASK_COMPILE, { quiet: true }); - } + if (!noCompile) { + await run(TASK_COMPILE, { quiet: true }); + } - const files = await run(TASK_TEST_GET_TEST_FILES, { testFiles }); + const files = await run(TASK_TEST_GET_TEST_FILES, { testFiles }); - // Start the zkSync node using TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS - const taskArgs: any[] = [/* Add necessary arguments here */]; - const { process: taskProcess, port } = await run(TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS, { - taskArgs: taskArgs - }); + // Start the zkSync node using TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS + const taskArgs: any[] = [ + /* Add necessary arguments here */ + ]; + const { process: taskProcess, port } = await run(TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS, { + taskArgs: taskArgs, + }); - await waitForNodeToBeReady(port) - configureNetwork(network, port); + await waitForNodeToBeReady(port); + configureNetwork(network, port); - let testFailures = 0; - try { - // Run the tests - testFailures = await run(TASK_TEST_RUN_MOCHA_TESTS, { - testFiles: files, - parallel, - bail, - grep, - }); - } finally { - // Ensure we shut down the zkSync node after tests are done - if (taskProcess) { - try { - process.kill(-taskProcess.pid!); - } catch (error: any) { - if (error.code !== 'ESRCH') { // ESRCH means the process was already terminated - console.info(chalk.red(`Failed to kill the zkSync node process when running tests: ${error.message}`)); + let testFailures = 0; + try { + // Run the tests + testFailures = await run(TASK_TEST_RUN_MOCHA_TESTS, { + testFiles: files, + parallel, + bail, + grep, + }); + } finally { + // Ensure we shut down the zkSync node after tests are done + if (taskProcess) { + try { + process.kill(-taskProcess.pid!); + } catch (error: any) { + if (error.code !== 'ESRCH') { + // ESRCH means the process was already terminated + console.info( + chalk.red(`Failed to kill the zkSync node process when running tests: ${error.message}`) + ); + } 
} } } - } - process.exitCode = testFailures; - return testFailures; -}); + process.exitCode = testFailures; + return testFailures; + } +); export { ZkSyncProviderAdapter } from './zksync-provider-adapter'; diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index c62f58ac9..783f745fa 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -302,7 +302,8 @@ export async function download( async function isPortAvailableForIP(port: number, ip: string): Promise { return new Promise((resolve) => { - const tester: net.Server = net.createServer() + const tester: net.Server = net + .createServer() .once('error', (err: any) => resolve(err.code !== 'EADDRINUSE')) .once('listening', () => tester.close(() => resolve(true))) .listen(port, ip); @@ -317,12 +318,12 @@ export async function isPortAvailable(port: number): Promise { export async function waitForNodeToBeReady(port: number, maxAttempts: number = 10): Promise { const rpcEndpoint = `http://localhost:${port}`; - + const payload = { - jsonrpc: "2.0", - method: "eth_chainId", + jsonrpc: '2.0', + method: 'eth_chainId', params: [], - id: new Date().getTime() // Unique ID for the request + id: new Date().getTime(), // Unique ID for the request }; let attempts = 0; @@ -338,7 +339,7 @@ export async function waitForNodeToBeReady(port: number, maxAttempts: number = 1 } attempts++; - await new Promise(r => setTimeout(r, 500)); // Wait for 500ms before the next attempt. + await new Promise((r) => setTimeout(r, 500)); // Wait for 500ms before the next attempt. } throw new ZkSyncNodePluginError("Server didn't respond after multiple attempts"); @@ -362,7 +363,7 @@ export function adjustTaskArgsForPort(taskArgs: string[], currentPort: number): if (portArgIndex + 1 < taskArgs.length) { taskArgs[portArgIndex + 1] = `${currentPort}`; } else { - throw new ZkSyncNodePluginError("Invalid task arguments: --port provided without a following port number."); + throw new ZkSyncNodePluginError('Invalid task arguments: --port provided without a following port number.'); } } else { taskArgs.push(portArg, `${currentPort}`); @@ -386,8 +387,8 @@ function getNetworkConfig(url: string) { export function configureNetwork(network: any, port: number) { const url = `${BASE_URL}:${port}`; - + network.name = ZKSYNC_ERA_TEST_NODE_NETWORK_NAME; network.config = getNetworkConfig(url); network.provider = new ZkSyncProviderAdapter(new Provider(url)); -} \ No newline at end of file +} diff --git a/packages/hardhat-zksync-node/src/zksync-provider-adapter.ts b/packages/hardhat-zksync-node/src/zksync-provider-adapter.ts index a6b19d311..7a3b31f27 100644 --- a/packages/hardhat-zksync-node/src/zksync-provider-adapter.ts +++ b/packages/hardhat-zksync-node/src/zksync-provider-adapter.ts @@ -1,9 +1,8 @@ import { EthereumProvider } from 'hardhat/types'; -import { EventEmitter } from "events"; +import { EventEmitter } from 'events'; import { Provider } from 'zksync-web3'; export class ZkSyncProviderAdapter extends EventEmitter implements EthereumProvider { - constructor(public readonly _zkSyncProvider: Provider) { super(); } @@ -12,7 +11,7 @@ export class ZkSyncProviderAdapter extends EventEmitter implements EthereumProvi return await this._zkSyncProvider.send(method, params); } - async request(payload: { method: string, params?: any[] }): Promise { + async request(payload: { method: string; params?: any[] }): Promise { return await this._zkSyncProvider.send(payload.method, payload.params ?? 
[]); } @@ -25,4 +24,3 @@ export class ZkSyncProviderAdapter extends EventEmitter implements EthereumProvi } } } - diff --git a/packages/hardhat-zksync-node/test/tests.ts b/packages/hardhat-zksync-node/test/tests.ts index 2c8d77b5f..6782e4571 100644 --- a/packages/hardhat-zksync-node/test/tests.ts +++ b/packages/hardhat-zksync-node/test/tests.ts @@ -13,7 +13,12 @@ import { spawn, ChildProcess } from 'child_process'; import * as utils from '../src/utils'; import { constructCommandArgs, getLatestRelease, getAssetToDownload, download } from '../src/utils'; import { RPCServerDownloader } from '../src/downloader'; -import { TASK_NODE_ZKSYNC, PROCESS_TERMINATION_SIGNALS, ZKSYNC_ERA_TEST_NODE_NETWORK_NAME, MAX_PORT_ATTEMPTS } from '../src/constants'; +import { + TASK_NODE_ZKSYNC, + PROCESS_TERMINATION_SIGNALS, + ZKSYNC_ERA_TEST_NODE_NETWORK_NAME, + MAX_PORT_ATTEMPTS, +} from '../src/constants'; import { Network } from 'hardhat/types'; chai.use(sinonChai); @@ -272,7 +277,7 @@ describe('node-zksync plugin', async function () { expect(axios.post).to.have.been.calledWith(`http://localhost:${port}`); }); - it('should throw an error if the node isn\'t ready after maxAttempts', async () => { + it("should throw an error if the node isn't ready after maxAttempts", async () => { // Make the stub reject all the time to simulate the node never being ready sinon.stub(axios, 'post').rejects(new Error('Node not ready')); @@ -280,18 +285,18 @@ describe('node-zksync plugin', async function () { await utils.waitForNodeToBeReady(8080, 10); throw new Error('Expected waitForNodeToBeReady to throw but it did not'); } catch (err: any) { - expect(err.message).to.equal('Server didn\'t respond after multiple attempts'); + expect(err.message).to.equal("Server didn't respond after multiple attempts"); } }); }); describe('adjustTaskArgsForPort', () => { - it('should correctly add the --port argument if it\'s not present', () => { + it("should correctly add the --port argument if it's not present", () => { const result = utils.adjustTaskArgsForPort([], 8000); expect(result).to.deep.equal(['--port', '8000']); }); - it('should correctly update the --port argument if it\'s present', () => { + it("should correctly update the --port argument if it's present", () => { const result = utils.adjustTaskArgsForPort(['--port', '9000'], 8000); expect(result).to.deep.equal(['--port', '8000']); }); @@ -299,17 +304,17 @@ describe('node-zksync plugin', async function () { describe('isPortAvailable', () => { let server: net.Server; - + afterEach(() => { if (server) server.close(); }); - + it('should correctly identify an available port', async () => { const port = await getPort(); const result = await utils.isPortAvailable(port); expect(result).to.be.true; }); - + it('should correctly identify a port that is in use', async () => { const port = await getPort(); server = net.createServer().listen(port); From 3557522b8ad8a77b2601e8295a38202ffe480626 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Thu, 12 Oct 2023 15:03:18 +0200 Subject: [PATCH 14/17] Bump hardhat-zksync-node version --- packages/hardhat-zksync-node/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/hardhat-zksync-node/package.json b/packages/hardhat-zksync-node/package.json index 300f8bdd8..1e13aac88 100644 --- a/packages/hardhat-zksync-node/package.json +++ b/packages/hardhat-zksync-node/package.json @@ -1,6 +1,6 @@ { "name": "@matterlabs/hardhat-zksync-node", - "version": "0.0.1-beta.1", + "version": "0.0.1-beta.2", "description": "Hardhat 
plugin to run zkSync era-test-node locally", "repository": "github:matter-labs/hardhat-zksync", "homepage": "https://github.com/matter-labs/hardhat-zksync/tree/main/packages/hardhat-zksync-node", From 066b11e710613dd60d11bf964157218c02fe2e95 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Fri, 13 Oct 2023 16:16:39 +0200 Subject: [PATCH 15/17] Add example to hardhat-zksync-node --- .github/workflows/ci.yml | 6 ++ examples/node-example/.eslintrc.js | 7 +++ examples/node-example/.gitignore | 3 + examples/node-example/README.md | 61 +++++++++++++++++++++ examples/node-example/contracts/Greeter.sol | 19 +++++++ examples/node-example/deploy/001_deploy.js | 44 +++++++++++++++ examples/node-example/hardhat.config.ts | 37 +++++++++++++ examples/node-example/package.json | 53 ++++++++++++++++++ examples/node-example/test/tests.ts | 35 ++++++++++++ examples/node-example/tsconfig.json | 18 ++++++ packages/hardhat-zksync-node/src/index.ts | 3 + packages/hardhat-zksync-node/src/utils.ts | 4 +- 12 files changed, 288 insertions(+), 2 deletions(-) create mode 100644 examples/node-example/.eslintrc.js create mode 100644 examples/node-example/.gitignore create mode 100644 examples/node-example/README.md create mode 100644 examples/node-example/contracts/Greeter.sol create mode 100644 examples/node-example/deploy/001_deploy.js create mode 100644 examples/node-example/hardhat.config.ts create mode 100644 examples/node-example/package.json create mode 100644 examples/node-example/test/tests.ts create mode 100644 examples/node-example/tsconfig.json diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ad7636299..4c6433976 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -88,6 +88,12 @@ jobs: yarn hardhat compile yarn hardhat deploy-zksync + - name: Test node example + run: | + cd examples/node-example + yarn hardhat compile + yarn hardhat test + - name: Test zkvyper example run: | cd examples/vyper-example diff --git a/examples/node-example/.eslintrc.js b/examples/node-example/.eslintrc.js new file mode 100644 index 000000000..889740f22 --- /dev/null +++ b/examples/node-example/.eslintrc.js @@ -0,0 +1,7 @@ +module.exports = { + extends: [`${__dirname}/../../config/eslint/eslintrc.js`], + parserOptions: { + project: `${__dirname}/tsconfig.json`, + sourceType: "module", + }, +}; diff --git a/examples/node-example/.gitignore b/examples/node-example/.gitignore new file mode 100644 index 000000000..183b3751a --- /dev/null +++ b/examples/node-example/.gitignore @@ -0,0 +1,3 @@ +cache +artifacts +contracts/tmp diff --git a/examples/node-example/README.md b/examples/node-example/README.md new file mode 100644 index 000000000..a21a70247 --- /dev/null +++ b/examples/node-example/README.md @@ -0,0 +1,61 @@ +# zkSync 2.0 deploy environment example + +This project demonstrates how to compile and deploy your contracts in zkSync 2.0 using the Hardhat plugins. + +## Prerequisites + +- node.js 14.x or later. +- yarn. + +## Configuration + +Plugin configuration is located in [`hardhat.config.ts`](./hardhat.config.ts). +You should only change the zkSync network configuration. 
+ +`hardhat.config.ts` example with zkSync network configured with the name `zkTestnet` and `goerli` used as the underlying layer 1 network: +```ts +import "@matterlabs/hardhat-zksync-deploy"; +import { HardhatUserConfig } from 'hardhat/types'; + +const config: HardhatUserConfig = { + networks: { + goerli: { + url: 'https://goerli.infura.io/v3/' // you can use either the URL of the Ethereum Web3 RPC, or the identifier of the network (e.g. `mainnet` or `rinkeby`) + }, + zkTestnet: { + url: 'https://zksync2-testnet.zksync.dev', // you should use the URL of the zkSync network RPC + ethNetwork: 'goerli', + zksync: true + }, + } +}; + +export default config; +``` + +## Usage + +Before using plugins, you need to build them first + +```sh +# Run the following in the *root* of the repo. +yarn +yarn build +``` + +After that you should be able to run plugins: + +```sh +# Run the following in `examples/basic-example` folder. +yarn +yarn hardhat compile +yarn hardhat deploy-zksync +``` + +- `yarn hardhat compile`: compiles all the contracts in the `contracts` folder. +- `yarn hardhat deploy-zksync`: runs all the deploy scripts in the `deploy` folder. + - To run a specific script, add the `--script` argument, e.g. `--script 001_deploy.ts`. + - To run on a specific zkSync network, use standard hardhat `--network` argument, e.g. `--network zkTestnet` + (with `zkTestnet` network specified in the `hardhat.config` networks section, with the `zksync` flag set to `true` and `ethNetwork` specified). + +If you don't specify zkSync network (`--network`), `local-setup` with (Ethereum RPC URL) and (zkSync RPC URL) will be used. diff --git a/examples/node-example/contracts/Greeter.sol b/examples/node-example/contracts/Greeter.sol new file mode 100644 index 000000000..f502c1ad1 --- /dev/null +++ b/examples/node-example/contracts/Greeter.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; +pragma abicoder v2; + +contract Greeter { + string greeting; + constructor(string memory _greeting) { + greeting = _greeting; + } + + function greet() public view returns (string memory) { + return greeting; + } + + function setGreeting(string memory _greeting) public { + greeting = _greeting; + } +} diff --git a/examples/node-example/deploy/001_deploy.js b/examples/node-example/deploy/001_deploy.js new file mode 100644 index 000000000..406936339 --- /dev/null +++ b/examples/node-example/deploy/001_deploy.js @@ -0,0 +1,44 @@ +const ethers = require('ethers'); +const zk = require('zksync-web3'); +const { Deployer } = require('@matterlabs/hardhat-zksync-deploy'); +const chalk = require('chalk'); + +// An example of a deploy script which will deploy and call a simple contract. +module.exports = async function (hre) { + console.info(chalk.yellow(`Running deploy script for the Greeter contract`)); + + // Initialize an Ethereum wallet. + const testMnemonic = 'stuff slice staff easily soup parent arm payment cotton trade scatter struggle'; + const zkWallet = zk.Wallet.fromMnemonic(testMnemonic, "m/44'/60'/0'/0/0"); + + // Create deployer object and load desired artifact. + const deployer = new Deployer(hre, zkWallet); + + // Deposit some funds to L2 in order to be able to perform deposits. + const depositHandle = await deployer.zkWallet.deposit({ + to: deployer.zkWallet.address, + token: zk.utils.ETH_ADDRESS, + amount: ethers.utils.parseEther('0.001'), + }); + await depositHandle.wait(); + + // Load the artifact we want to deploy. 
+ const artifact = await deployer.loadArtifact('Greeter'); + + // Deploy this contract. The returned object will be of a `Contract` type, similarly to ones in `ethers`. + // `greeting` is an argument for contract constructor. + const greeting = 'Hi there!'; + const greeterContract = await deployer.deploy(artifact, [greeting]); + + // Show the contract info. + const contractAddress = greeterContract.address; + console.info(chalk.green(`${artifact.contractName} was deployed to ${contractAddress}!`)); + + // Call the deployed contract. + const greetingFromContract = await greeterContract.greet(); + if (greetingFromContract == greeting) { + console.info(chalk.green(`Successful greeting from the contract`)); + } else { + throw new Error(`Contract returned unexpected greeting: ${greetingFromContract}`); + } +}; diff --git a/examples/node-example/hardhat.config.ts b/examples/node-example/hardhat.config.ts new file mode 100644 index 000000000..f18b97607 --- /dev/null +++ b/examples/node-example/hardhat.config.ts @@ -0,0 +1,37 @@ +import '@matterlabs/hardhat-zksync-deploy'; +import '@matterlabs/hardhat-zksync-solc'; +import '@matterlabs/hardhat-zksync-node'; + +import { HardhatUserConfig } from 'hardhat/config'; + +const config: HardhatUserConfig = { + zksolc: { + compilerSource: 'binary', + settings: { + isSystem: true, + optimizer: { + enabled: true, + }, + } + }, + networks: { + hardhat: { + zksync: true, + }, + ethNetwork: { + url: 'http://0.0.0.0:8545', + }, + zkSyncNetwork: { + url: 'http://0.0.0.0:3050', + ethNetwork: 'ethNetwork', + zksync: true, + }, + }, + // Docker image only works for solidity ^0.8.0. + // For earlier versions you need to use binary releases of zksolc. + solidity: { + version: '0.8.17', + }, +}; + +export default config; diff --git a/examples/node-example/package.json b/examples/node-example/package.json new file mode 100644 index 000000000..5d5d57491 --- /dev/null +++ b/examples/node-example/package.json @@ -0,0 +1,53 @@ +{ + "name": "harhat-zksync-example-node", + "version": "0.1.0", + "author": "Matter Labs", + "license": "MIT", + "scripts": { + "lint": "yarn prettier --check && yarn eslint", + "lint:fix": "yarn eslint --fix", + "fmt": "yarn prettier --write", + "eslint": "eslint deploy/*.ts", + "prettier": "prettier deploy/*.ts", + "test": "mocha test/tests.ts --exit", + "build": "tsc --build .", + "clean": "rimraf dist" + }, + "devDependencies": { + "@types/node": "^18.11.17", + "@types/chai": "^4.2.0", + "@types/mocha": "^9.1.0", + "@typescript-eslint/eslint-plugin": "4.29.2", + "@typescript-eslint/parser": "5.13.0", + "eslint": "^8.10.0", + "eslint-config-prettier": "8.3.0", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-prettier": "3.4.0", + "chai": "^4.3.7", + "mocha": "^10.1.0", + "prettier": "2.3.2", + "rimraf": "^3.0.2", + "ts-node": "^10.6.0", + "typescript": "^4.6.2" + }, + "dependencies": { + "@matterlabs/hardhat-zksync-deploy": "link:../../packages/hardhat-zksync-deploy", + "@matterlabs/hardhat-zksync-solc": "link:../../packages/hardhat-zksync-solc", + "@matterlabs/hardhat-zksync-node": "link:../../packages/hardhat-zksync-node", + "@matterlabs/hardhat-zksync-chai-matchers": "link:../../packages/hardhat-zksync-chai-matchers", + "chalk": "4.1.2", + "hardhat": "^2.14.0", + "ethers": "~5.7.2", + "zksync-web3": "^0.14.3", + "@matterlabs/zksync-contracts": "^0.6.1", + "@openzeppelin/contracts": "^4.9.2", + "@openzeppelin/contracts-upgradeable": "^4.9.2" + }, + "prettier": { + "tabWidth": 4, + "printWidth": 120, + "parser": "typescript", + "singleQuote": 
true, + "bracketSpacing": true + } +} diff --git a/examples/node-example/test/tests.ts b/examples/node-example/test/tests.ts new file mode 100644 index 000000000..1d986eefc --- /dev/null +++ b/examples/node-example/test/tests.ts @@ -0,0 +1,35 @@ +import { expect } from "chai"; +import { Contract, Wallet } from "zksync-web3"; +import { Deployer } from "@matterlabs/hardhat-zksync-deploy"; +import { ZkSyncArtifact } from '@matterlabs/hardhat-zksync-deploy/src/types'; +import { ZkSyncProviderAdapter } from "@matterlabs/hardhat-zksync-node"; +import * as hre from "hardhat"; + +describe("Greeter", function () { + let deployer: Deployer; + let artifact: ZkSyncArtifact; + let contract: Contract; + + beforeEach(async function () { + // Deploy the contract before each test + deployer = new Deployer(hre, new Wallet('0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110', (hre.network.provider as ZkSyncProviderAdapter)._zkSyncProvider)); + artifact = await deployer.loadArtifact('Greeter'); + contract = await deployer.deploy(artifact, ['Hello, world!']); + }); + + // Test the constructor + it("Should set the greeting to the constructor argument", async function () { + expect(await contract.greet()).to.equal("Hello, world!"); + }); + + // Test the greet() function + it("Should return the current greeting", async function () { + expect(await contract.greet()).to.equal("Hello, world!"); + }); + + // Test the setGreeting() function + it("Should set a new greeting", async function () { + await contract.setGreeting("Hello, Ethereum!"); + expect(await contract.greet()).to.equal("Hello, Ethereum!"); + }); +}); diff --git a/examples/node-example/tsconfig.json b/examples/node-example/tsconfig.json new file mode 100644 index 000000000..c6570050d --- /dev/null +++ b/examples/node-example/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "es5", + "module": "commonjs", + "strict": true, + "esModuleInterop": true, + "moduleResolution": "node", + "forceConsistentCasingInFileNames": true, + "outDir": "dist" + }, + "include": [ + "./hardhat.config.ts", + "./scripts", + "./deploy", + "./test", + "typechain/**/*" + ] +} diff --git a/packages/hardhat-zksync-node/src/index.ts b/packages/hardhat-zksync-node/src/index.ts index 1fa2aaddb..63bb13ced 100644 --- a/packages/hardhat-zksync-node/src/index.ts +++ b/packages/hardhat-zksync-node/src/index.ts @@ -270,6 +270,9 @@ task( const files = await run(TASK_TEST_GET_TEST_FILES, { testFiles }); + // Download the binary, if necessary + const binaryPath: string = await run(TASK_NODE_ZKSYNC_DOWNLOAD_BINARY, { force: false }); + // Start the zkSync node using TASK_RUN_NODE_ZKSYNC_IN_SEPARATE_PROCESS const taskArgs: any[] = [ /* Add necessary arguments here */ diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index 783f745fa..e8258e44c 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -316,7 +316,7 @@ export async function isPortAvailable(port: number): Promise { return availableIPv4 && availableIPv6; } -export async function waitForNodeToBeReady(port: number, maxAttempts: number = 10): Promise { +export async function waitForNodeToBeReady(port: number, maxAttempts: number = 20): Promise { const rpcEndpoint = `http://localhost:${port}`; const payload = { @@ -339,7 +339,7 @@ export async function waitForNodeToBeReady(port: number, maxAttempts: number = 1 } attempts++; - await new Promise((r) => setTimeout(r, 500)); // Wait for 500ms before the next 
attempt. + await new Promise((r) => setTimeout(r, 1000)); // Wait for 1000ms before the next attempt. } throw new ZkSyncNodePluginError("Server didn't respond after multiple attempts"); From d6ccc160809ecd00fdac52c165aba5e1bec5f103 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Fri, 13 Oct 2023 17:06:38 +0200 Subject: [PATCH 16/17] Add exponential backoff to wait for node --- packages/hardhat-zksync-node/package.json | 2 +- packages/hardhat-zksync-node/src/constants.ts | 2 +- packages/hardhat-zksync-node/src/utils.ts | 23 +++++++++++++------ 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/packages/hardhat-zksync-node/package.json b/packages/hardhat-zksync-node/package.json index 1e13aac88..b4aab73a2 100644 --- a/packages/hardhat-zksync-node/package.json +++ b/packages/hardhat-zksync-node/package.json @@ -1,6 +1,6 @@ { "name": "@matterlabs/hardhat-zksync-node", - "version": "0.0.1-beta.2", + "version": "0.0.1-beta.3", "description": "Hardhat plugin to run zkSync era-test-node locally", "repository": "github:matter-labs/hardhat-zksync", "homepage": "https://github.com/matter-labs/hardhat-zksync/tree/main/packages/hardhat-zksync-node", diff --git a/packages/hardhat-zksync-node/src/constants.ts b/packages/hardhat-zksync-node/src/constants.ts index df346153c..109786112 100644 --- a/packages/hardhat-zksync-node/src/constants.ts +++ b/packages/hardhat-zksync-node/src/constants.ts @@ -32,7 +32,7 @@ export const PORT_CHECK_DELAY = 500; export const RPC_ENDPOINT_PATH = 'eth_chainId'; export const ZKSYNC_ERA_TEST_NODE_NETWORK_NAME = 'zkSyncEraTestNode'; -export const BASE_URL = `http://localhost`; +export const BASE_URL = `http://127.0.0.1`; export const NETWORK_ACCOUNTS = { REMOTE: 'remote', }; diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index e8258e44c..8e3ffbdea 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -167,8 +167,7 @@ export async function getLatestRelease(owner: string, repo: string, userAgent: s if (error.response) { // The request was made and the server responded with a status code outside of the range of 2xx throw new ZkSyncNodePluginError( - `Failed to get latest release for ${owner}/${repo}. Status: ${ - error.response.status + `Failed to get latest release for ${owner}/${repo}. 
Status: ${error.response.status }, Data: ${JSON.stringify(error.response.data)}` ); } else if (error.request) { @@ -317,16 +316,20 @@ export async function isPortAvailable(port: number): Promise { } export async function waitForNodeToBeReady(port: number, maxAttempts: number = 20): Promise { - const rpcEndpoint = `http://localhost:${port}`; + const rpcEndpoint = `http://127.0.0.1:${port}`; const payload = { jsonrpc: '2.0', method: 'eth_chainId', params: [], - id: new Date().getTime(), // Unique ID for the request + id: new Date().getTime(), }; let attempts = 0; + let waitTime = 1000; // Initial wait time in milliseconds + const backoffFactor = 2; + const maxWaitTime = 30000; // Maximum wait time (e.g., 30 seconds) + while (attempts < maxAttempts) { try { const response = await axios.post(rpcEndpoint, payload); @@ -334,15 +337,21 @@ export async function waitForNodeToBeReady(port: number, maxAttempts: number = 2 if (response.data && response.data.result) { return; // The node responded with a valid chain ID } - } catch (e) { + } catch (e: any) { + // console.error(`Attempt ${attempts + 1} failed with error:`, e.message); // If it fails, it will just try again } attempts++; - await new Promise((r) => setTimeout(r, 1000)); // Wait for 1000ms before the next attempt. + + // Wait before the next attempt + await new Promise((r) => setTimeout(r, waitTime)); + + // Update the wait time for the next attempt + waitTime = Math.min(waitTime * backoffFactor, maxWaitTime); } - throw new ZkSyncNodePluginError("Server didn't respond after multiple attempts"); + throw new Error("Server didn't respond after multiple attempts"); } export async function getAvailablePort(startPort: number, maxAttempts: number): Promise { From e2d21b7efab27c4a012153dc381d24d24e7e0bd2 Mon Sep 17 00:00:00 2001 From: Mimi TxFusion Date: Fri, 13 Oct 2023 17:10:31 +0200 Subject: [PATCH 17/17] Fix error type --- packages/hardhat-zksync-node/src/utils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/hardhat-zksync-node/src/utils.ts b/packages/hardhat-zksync-node/src/utils.ts index 8e3ffbdea..ccec3ce52 100644 --- a/packages/hardhat-zksync-node/src/utils.ts +++ b/packages/hardhat-zksync-node/src/utils.ts @@ -351,7 +351,7 @@ export async function waitForNodeToBeReady(port: number, maxAttempts: number = 2 waitTime = Math.min(waitTime * backoffFactor, maxWaitTime); } - throw new Error("Server didn't respond after multiple attempts"); + throw new ZkSyncNodePluginError("Server didn't respond after multiple attempts"); } export async function getAvailablePort(startPort: number, maxAttempts: number): Promise {
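
For reference, the process-group handling that the overridden `test` task relies on (spawning the node with `detached: true` and later calling `process.kill(-pid)`) follows standard POSIX/Node.js semantics: a detached child becomes the leader of its own process group, and signalling a negative PID targets that whole group rather than a single child. Below is a minimal standalone sketch of that pattern, not part of the patches above; the spawned command, timeout, and signal are illustrative assumptions.

import { spawn } from 'child_process';

// Start a long-running child in its own process group, as the plugin does for `npx hardhat node-zksync`.
const child = spawn('npx', ['hardhat', 'node-zksync'], { detached: true, stdio: 'ignore' });

// Later, terminate the entire group: the leading '-' makes the PID refer to the process group.
setTimeout(() => {
    try {
        process.kill(-child.pid!, 'SIGTERM');
    } catch (error: any) {
        // ESRCH means the group has already exited, which is fine.
        if (error.code !== 'ESRCH') {
            console.error(`Failed to terminate the node process group: ${error.message}`);
        }
    }
}, 10_000);

Without `detached: true` the child would not lead its own process group, so the negative-PID kill would have nothing reliable to target; creating a separate group before the tests start is what makes the cleanup in the `finally` block dependable.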