-
Notifications
You must be signed in to change notification settings - Fork 2.3k
Gracelessly shove Hardhat support into truffle debug
#5410
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,183 @@ | ||
import type * as Hardhat from "hardhat/types"; | ||
|
||
import type { | ||
Compilation, | ||
CompiledContract, | ||
Bytecode, | ||
LinkReference | ||
} from "../types"; | ||
|
||
const supportedFormats = new Set(["hh-sol-build-info-1"]); | ||
|
||
export function buildInfoCompilation( | ||
buildInfo: Hardhat.BuildInfo | ||
): Compilation { | ||
const { _format } = buildInfo; | ||
|
||
if (!supportedFormats.has(_format)) { | ||
throw new Error(`Unsupported build info format: ${_format}`); | ||
} | ||
|
||
const sourceIndexes = buildInfoSourceIndexes(buildInfo); | ||
|
||
return { | ||
sourceIndexes, | ||
sources: buildInfoSources(buildInfo, sourceIndexes), | ||
contracts: buildInfoContracts(buildInfo), | ||
compiler: { | ||
name: "solc", | ||
version: buildInfo.solcLongVersion | ||
} | ||
}; | ||
} | ||
|
||
function buildInfoSourceIndexes( | ||
buildInfo: Hardhat.BuildInfo | ||
): Compilation["sourceIndexes"] { | ||
const sourceIndexes = []; | ||
for (const { index, sourcePath } of Object.entries( | ||
buildInfo.output.sources | ||
).map(([sourcePath, source]) => ({ index: source.id, sourcePath }))) { | ||
sourceIndexes[index] = sourcePath; | ||
} | ||
|
||
return sourceIndexes; | ||
} | ||
|
||
function buildInfoSources( | ||
buildInfo: Hardhat.BuildInfo, | ||
sourceIndexes: Compilation["sourceIndexes"] | ||
): Compilation["sources"] { | ||
return sourceIndexes.map(sourcePath => { | ||
// to handle if sourceIndexes is a sparse array | ||
if (!sourcePath) { | ||
return; | ||
} | ||
|
||
const inputSource = buildInfo.input.sources[sourcePath]; | ||
const outputSource = buildInfo.output.sources[sourcePath]; | ||
|
||
return { | ||
sourcePath, | ||
contents: inputSource.content, | ||
ast: outputSource.ast, | ||
language: buildInfo.input.language | ||
}; | ||
}); | ||
} | ||
|
||
function buildInfoContracts(buildInfo: Hardhat.BuildInfo): CompiledContract[] { | ||
const contracts = []; | ||
for (const [sourcePath, sourceContracts] of Object.entries( | ||
buildInfo.output.contracts | ||
)) { | ||
for (const [contractName, compilerOutputContract] of Object.entries( | ||
sourceContracts | ||
)) { | ||
const contract: CompiledContract = { | ||
contractName, | ||
sourcePath, | ||
source: buildInfo.input.sources[sourcePath].content, | ||
sourceMap: compilerOutputContract.evm.bytecode.sourceMap, | ||
deployedSourceMap: | ||
compilerOutputContract.evm.deployedBytecode.sourceMap, | ||
legacyAST: undefined, | ||
ast: buildInfo.output.sources[sourcePath].ast, | ||
abi: compilerOutputContract.abi, | ||
metadata: (compilerOutputContract as any).metadata, | ||
bytecode: zeroLinkReferences({ | ||
bytes: compilerOutputContract.evm.bytecode.object, | ||
linkReferences: formatLinkReferences( | ||
compilerOutputContract.evm.bytecode.linkReferences | ||
) | ||
}), | ||
deployedBytecode: zeroLinkReferences({ | ||
bytes: compilerOutputContract.evm.deployedBytecode.object, | ||
linkReferences: formatLinkReferences( | ||
compilerOutputContract.evm.deployedBytecode.linkReferences | ||
) | ||
}), | ||
compiler: { | ||
name: "solc", | ||
version: buildInfo.solcLongVersion | ||
}, | ||
devdoc: undefined, | ||
userdoc: undefined, | ||
immutableReferences: | ||
compilerOutputContract.evm.deployedBytecode.immutableReferences, | ||
generatedSources: (compilerOutputContract.evm.bytecode as any) | ||
.generatedSources, | ||
deployedGeneratedSources: ( | ||
compilerOutputContract.evm.deployedBytecode as any | ||
).generatedSources | ||
}; | ||
|
||
contracts.push(contract); | ||
} | ||
} | ||
|
||
return contracts; | ||
} | ||
|
||
// HACK stolen from compile-solidity | ||
function formatLinkReferences( | ||
linkReferences: Hardhat.CompilerOutputBytecode["linkReferences"] | ||
): LinkReference[] { | ||
if (!linkReferences) { | ||
return []; | ||
} | ||
|
||
// convert to flat list | ||
const libraryLinkReferences = Object.values(linkReferences) | ||
.map(fileLinks => | ||
Object.entries(fileLinks).map(([name, links]) => ({ | ||
name, | ||
links | ||
})) | ||
) | ||
.reduce((a, b) => [...a, ...b], []); | ||
|
||
// convert to { offsets, length, name } format | ||
return libraryLinkReferences.map(({ name, links }) => ({ | ||
offsets: links.map(({ start }) => start), | ||
length: links[0].length, // HACK just assume they're going to be the same | ||
name | ||
})); | ||
} | ||
|
||
// HACK stolen from compile-solidity | ||
// takes linkReferences in output format (not Solidity's format) | ||
function zeroLinkReferences({ | ||
bytes, | ||
linkReferences | ||
}: { | ||
bytes: string; | ||
linkReferences: LinkReference[]; | ||
}): Bytecode { | ||
if (bytes === undefined) { | ||
return undefined; | ||
} | ||
// inline link references - start by flattening the offsets | ||
const flattenedLinkReferences = linkReferences | ||
// map each link ref to array of link refs with only one offset | ||
.map(({ offsets, length, name }) => | ||
offsets.map(offset => ({ offset, length, name })) | ||
) | ||
// flatten | ||
.reduce((a, b) => [...a, ...b], []); | ||
|
||
// then overwite bytes with zeroes | ||
bytes = flattenedLinkReferences.reduce((bytes, { offset, length }) => { | ||
// length is a byte offset | ||
const characterLength = length * 2; | ||
const start = offset * 2; | ||
|
||
const zeroes = "0".repeat(characterLength); | ||
|
||
return `${bytes.substring(0, start)}${zeroes}${bytes.substring( | ||
start + characterLength | ||
)}`; | ||
}, bytes); | ||
|
||
return { bytes, linkReferences }; | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,2 +1,3 @@ | ||
export * as LegacyToNew from "./LegacyToNew"; | ||
export * as NewToLegacy from "./NewToLegacy"; | ||
export * as Hardhat from "./FromHardhat"; |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,21 +1,50 @@ | ||
module.exports = async function (options) { | ||
const fs = require("fs/promises"); | ||
const { promisify } = require("util"); | ||
const debugModule = require("debug"); | ||
const loadConfig = require("../../loadConfig"); | ||
const debug = debugModule("lib:commands:debug"); | ||
|
||
const Config = require("@truffle/config"); | ||
const { Environment } = require("@truffle/environment"); | ||
const { CLIDebugger } = require("../../debug"); | ||
const CompileCommon = require("@truffle/compile-common"); | ||
const Codec = require("@truffle/codec"); | ||
|
||
if (options.url && options.network) { | ||
throw new Error("Url and Network options should not be specified together"); | ||
} | ||
|
||
let config = loadConfig(options); | ||
let config; | ||
let compilations; | ||
try { | ||
config = loadConfig(options); | ||
} catch { | ||
config = Config.default(); | ||
config.network = "development"; | ||
config.networks["development"] = { | ||
url: "http://127.0.0.1:8545", | ||
network_id: "*" | ||
}; | ||
|
||
require(`${config.working_directory}/node_modules/hardhat/register`); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Not needed if you import hardhat below There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I tried it without this and this line was necessary... I think I'm going to go with the approach of shelling out to |
||
const { | ||
artifacts | ||
} = require(`${config.working_directory}/node_modules/hardhat`); | ||
const buildInfoPaths = await artifacts.getBuildInfoPaths(); | ||
const buildInfos = await Promise.all( | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This will lead to an OOM. I'd read one build info at a time, as they can be huge. They have the entire solc output, so they can take 100mb+ in some large projects. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Good call! |
||
buildInfoPaths.map(async buildInfoPath => | ||
JSON.parse(await fs.readFile(buildInfoPath)) | ||
) | ||
); | ||
compilations = Codec.Compilations.Utils.shimCompilations( | ||
buildInfos.map(CompileCommon.Shims.Hardhat.buildInfoCompilation) | ||
); | ||
} | ||
|
||
await Environment.detect(config); | ||
|
||
const txHash = config._[0]; //may be undefined | ||
const txHash = options._[0]; //may be undefined | ||
if (config.fetchExternal && txHash === undefined) { | ||
throw new Error( | ||
"Fetch-external mode requires a specific transaction to debug" | ||
|
@@ -27,6 +56,9 @@ module.exports = async function (options) { | |
if (config.compileAll && config.compileNone) { | ||
throw new Error("Incompatible options passed regarding what to compile"); | ||
} | ||
const interpreter = await new CLIDebugger(config, { txHash }).run(); | ||
const interpreter = await new CLIDebugger(config, { | ||
compilations, | ||
txHash | ||
}).run(); | ||
return await promisify(interpreter.start.bind(interpreter))(); | ||
}; |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
<3
It's great that you are validating this!
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Of course :D