Skip to content
This repository has been archived by the owner on Feb 26, 2024. It is now read-only.

Gracelessly shove Hardhat support into truffle debug #5410

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions packages/compile-common/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
"@truffle/contract-schema": "^3.4.8",
"@types/mocha": "^5.2.7",
"@types/node": "12.12.21",
"hardhat": "^2.10.1",
"mocha": "9.2.2",
"ts-node": "10.7.0",
"typescript": "^4.1.4"
Expand Down
183 changes: 183 additions & 0 deletions packages/compile-common/src/shims/FromHardhat.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,183 @@
import type * as Hardhat from "hardhat/types";

import type {
Compilation,
CompiledContract,
Bytecode,
LinkReference
} from "../types";

// Hardhat build-info `_format` versions this shim knows how to convert
const supportedFormats = new Set(["hh-sol-build-info-1"]);

export function buildInfoCompilation(
buildInfo: Hardhat.BuildInfo
): Compilation {
const { _format } = buildInfo;

if (!supportedFormats.has(_format)) {
throw new Error(`Unsupported build info format: ${_format}`);

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

<3

It's great that you are validating this!

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Of course :D

}

const sourceIndexes = buildInfoSourceIndexes(buildInfo);

return {
sourceIndexes,
sources: buildInfoSources(buildInfo, sourceIndexes),
contracts: buildInfoContracts(buildInfo),
compiler: {
name: "solc",
version: buildInfo.solcLongVersion
}
};
}

/**
 * Builds Compilation.sourceIndexes: position = solc-assigned source id,
 * value = source path. The result may be sparse if ids are non-contiguous.
 */
function buildInfoSourceIndexes(
  buildInfo: Hardhat.BuildInfo
): Compilation["sourceIndexes"] {
  const paths: string[] = [];
  for (const [sourcePath, source] of Object.entries(
    buildInfo.output.sources
  )) {
    paths[source.id] = sourcePath;
  }

  return paths;
}

/**
 * Assembles Compilation.sources in source-index order, combining each
 * path's compiler input (contents, language) with its output (ast).
 * Holes in a sparse sourceIndexes array come through as undefined entries.
 */
function buildInfoSources(
  buildInfo: Hardhat.BuildInfo,
  sourceIndexes: Compilation["sourceIndexes"]
): Compilation["sources"] {
  return sourceIndexes.map(sourcePath =>
    sourcePath
      ? {
          sourcePath,
          contents: buildInfo.input.sources[sourcePath].content,
          ast: buildInfo.output.sources[sourcePath].ast,
          language: buildInfo.input.language
        }
      : undefined
  );
}

/**
 * Flattens the build info's per-source contract outputs into a single
 * list of Truffle CompiledContracts.
 */
function buildInfoContracts(buildInfo: Hardhat.BuildInfo): CompiledContract[] {
  const results: CompiledContract[] = [];

  for (const [sourcePath, fileContracts] of Object.entries(
    buildInfo.output.contracts
  )) {
    for (const [contractName, output] of Object.entries(fileContracts)) {
      const { bytecode, deployedBytecode } = output.evm;

      results.push({
        contractName,
        sourcePath,
        source: buildInfo.input.sources[sourcePath].content,
        sourceMap: bytecode.sourceMap,
        deployedSourceMap: deployedBytecode.sourceMap,
        legacyAST: undefined,
        ast: buildInfo.output.sources[sourcePath].ast,
        abi: output.abi,
        // metadata is absent from Hardhat's contract output type, but
        // present in actual solc output
        metadata: (output as any).metadata,
        bytecode: zeroLinkReferences({
          bytes: bytecode.object,
          linkReferences: formatLinkReferences(bytecode.linkReferences)
        }),
        deployedBytecode: zeroLinkReferences({
          bytes: deployedBytecode.object,
          linkReferences: formatLinkReferences(deployedBytecode.linkReferences)
        }),
        compiler: {
          name: "solc",
          version: buildInfo.solcLongVersion
        },
        devdoc: undefined,
        userdoc: undefined,
        immutableReferences: deployedBytecode.immutableReferences,
        // generatedSources fields are likewise missing from Hardhat's
        // bytecode types
        generatedSources: (bytecode as any).generatedSources,
        deployedGeneratedSources: (deployedBytecode as any).generatedSources
      });
    }
  }

  return results;
}

// HACK stolen from compile-solidity
/**
 * Converts solc-style linkReferences
 * ({ sourceFile: { libraryName: [{ start, length }, ...] } }) into
 * Truffle's flat [{ offsets, length, name }] representation.
 *
 * @param linkReferences - bytecode linkReferences from compiler output;
 *   may be undefined when the contract links no libraries
 * @returns one entry per library, with all of its offsets collected
 */
function formatLinkReferences(
  linkReferences: Hardhat.CompilerOutputBytecode["linkReferences"]
): LinkReference[] {
  if (!linkReferences) {
    return [];
  }

  // convert to flat list; flatMap replaces the old O(n²)
  // reduce-with-spread flatten
  const libraryLinkReferences = Object.values(linkReferences).flatMap(
    fileLinks =>
      Object.entries(fileLinks).map(([name, links]) => ({
        name,
        links
      }))
  );

  // convert to { offsets, length, name } format
  return libraryLinkReferences.map(({ name, links }) => ({
    offsets: links.map(({ start }) => start),
    length: links[0].length, // HACK just assume they're going to be the same
    name
  }));
}

// HACK stolen from compile-solidity
// takes linkReferences in output format (not Solidity's format)
/**
 * Replaces every link-reference placeholder in a hex bytecode string with
 * zeroes, so bytecodes are normalized regardless of linked addresses.
 *
 * @param bytes - unprefixed hex bytecode; may be undefined (e.g. for
 *   interfaces / abstract contracts), in which case undefined is returned
 * @param linkReferences - flat link references (see formatLinkReferences)
 * @returns a Bytecode whose placeholder regions are zeroed
 */
function zeroLinkReferences({
  bytes,
  linkReferences
}: {
  bytes: string;
  linkReferences: LinkReference[];
}): Bytecode {
  if (bytes === undefined) {
    return undefined;
  }

  // inline link references - start by flattening to one record per offset;
  // flatMap replaces the old O(n²) reduce-with-spread flatten
  const flattenedLinkReferences = linkReferences.flatMap(
    ({ offsets, length, name }) =>
      offsets.map(offset => ({ offset, length, name }))
  );

  // then overwrite the placeholder bytes with zeroes
  bytes = flattenedLinkReferences.reduce((bytes, { offset, length }) => {
    // length is a byte count; two hex characters per byte
    const characterLength = length * 2;
    const start = offset * 2;

    const zeroes = "0".repeat(characterLength);

    return `${bytes.substring(0, start)}${zeroes}${bytes.substring(
      start + characterLength
    )}`;
  }, bytes);

  return { bytes, linkReferences };
}
1 change: 1 addition & 0 deletions packages/compile-common/src/shims/index.ts
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
export * as LegacyToNew from "./LegacyToNew";
export * as NewToLegacy from "./NewToLegacy";
export * as Hardhat from "./FromHardhat";
38 changes: 35 additions & 3 deletions packages/core/lib/commands/debug/run.js
Original file line number Diff line number Diff line change
@@ -1,21 +1,50 @@
module.exports = async function (options) {
const fs = require("fs/promises");
const { promisify } = require("util");
const debugModule = require("debug");
const loadConfig = require("../../loadConfig");
const debug = debugModule("lib:commands:debug");

const Config = require("@truffle/config");
const { Environment } = require("@truffle/environment");
const { CLIDebugger } = require("../../debug");
const CompileCommon = require("@truffle/compile-common");
const Codec = require("@truffle/codec");

if (options.url && options.network) {
throw new Error("Url and Network options should not be specified together");
}

let config = loadConfig(options);
let config;
let compilations;
try {
config = loadConfig(options);
} catch {
config = Config.default();
config.network = "development";
config.networks["development"] = {
url: "http://127.0.0.1:8545",
network_id: "*"
};

require(`${config.working_directory}/node_modules/hardhat/register`);
Copy link

@alcuadrado alcuadrado Aug 9, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not needed if you import hardhat below

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I tried it without this and this line was necessary... I think I'm going to go with the approach of shelling out to node -r hardhat/register instead of this.

const {
artifacts
} = require(`${config.working_directory}/node_modules/hardhat`);
const buildInfoPaths = await artifacts.getBuildInfoPaths();
const buildInfos = await Promise.all(

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This will lead to an OOM. I'd read one build info at the time, as they can be huge. They have the entire solc output, so they can take 100mb+ in some large projects.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Good call!

buildInfoPaths.map(async buildInfoPath =>
JSON.parse(await fs.readFile(buildInfoPath))
)
);
compilations = Codec.Compilations.Utils.shimCompilations(
buildInfos.map(CompileCommon.Shims.Hardhat.buildInfoCompilation)
);
}

await Environment.detect(config);

const txHash = config._[0]; //may be undefined
const txHash = options._[0]; //may be undefined
if (config.fetchExternal && txHash === undefined) {
throw new Error(
"Fetch-external mode requires a specific transaction to debug"
Expand All @@ -27,6 +56,9 @@ module.exports = async function (options) {
if (config.compileAll && config.compileNone) {
throw new Error("Incompatible options passed regarding what to compile");
}
const interpreter = await new CLIDebugger(config, { txHash }).run();
const interpreter = await new CLIDebugger(config, {
compilations,
txHash
}).run();
return await promisify(interpreter.start.bind(interpreter))();
};
Loading