commit 18084588ea
@@ -8,7 +8,7 @@
         "eip158Block": 3,
         "byzantiumBlock": 4,
         "clique": {
-            "period": 0,
+            "period": 1,
             "epoch": 30000
         }
     },

@@ -306,6 +306,8 @@ export interface TraceParams {
     disableMemory?: boolean;
     disableStack?: boolean;
     disableStorage?: boolean;
+    tracer?: string;
+    timeout?: string;
 }

 export type OutputField =

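The two new optional fields are forwarded to `debug_traceTransaction`: `tracer` carries a custom JavaScript tracer and `timeout` bounds how long the node may spend tracing. A minimal sketch of a call that uses them (the `web3Wrapper` instance and `txHash` are assumed):

    // Sketch: pass a custom tracer through the new TraceParams fields.
    // The tracer source is evaluated by the node, not by this process.
    const trace = await web3Wrapper.getTransactionTraceAsync(txHash, {
        tracer: '{ data: [], step: function(log) {}, fault: function() {}, result: function() { return { structLogs: this.data }; } }',
        timeout: '600s',
    });
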
@@ -74,19 +74,19 @@ export const coverageHandler: SingleFileSubtraceHandler = (

     let sourceRanges = _.map(subtrace, structLog => pcToSourceRange[structLog.pc]);
     sourceRanges = _.compact(sourceRanges); // Some PC's don't map to a source range and we just ignore them.
-    // By default lodash does a shallow object comparasion. We JSON.stringify them and compare as strings.
+    // By default lodash does a shallow object comparison. We JSON.stringify them and compare as strings.
     sourceRanges = _.uniqBy(sourceRanges, s => JSON.stringify(s)); // We don't care if one PC was covered multiple times within a single transaction
     sourceRanges = _.filter(sourceRanges, sourceRange => sourceRange.fileName === absoluteFileName);
     const branchCoverage: BranchCoverage = {};
     const branchIds = _.keys(coverageEntriesDescription.branchMap);
     for (const branchId of branchIds) {
         const branchDescription = coverageEntriesDescription.branchMap[branchId];
-        const isBranchCoveredByBranchIndex = _.map(branchDescription.locations, location => {
+        const branchIndexToIsBranchCovered = _.map(branchDescription.locations, location => {
             const isBranchCovered = _.some(sourceRanges, range => utils.isRangeInside(range.location, location));
             const timesBranchCovered = Number(isBranchCovered);
             return timesBranchCovered;
         });
-        branchCoverage[branchId] = isBranchCoveredByBranchIndex;
+        branchCoverage[branchId] = branchIndexToIsBranchCovered;
     }
     const statementCoverage: StatementCoverage = {};
     const statementIds = _.keys(coverageEntriesDescription.statementMap);

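The `JSON.stringify` dedupe exists because lodash compares objects by identity, not structure; a quick illustration of the pitfall the comment describes:

    import * as _ from 'lodash';

    // Two structurally identical ranges are still distinct objects:
    const ranges = [{ fileName: 'A.sol' }, { fileName: 'A.sol' }];
    _.uniq(ranges).length; // 2 - identity comparison keeps both
    _.uniqBy(ranges, r => JSON.stringify(r)).length; // 1 - structural dedupe
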
@@ -4,6 +4,8 @@ export {
     TruffleArtifactAdapter,
     AbstractArtifactAdapter,
     ContractData,
+    SourceCodes,
+    Sources,
 } from '@0x/sol-tracing-utils';

 export {

@@ -3,6 +3,8 @@ export {
     SolCompilerArtifactAdapter,
     TruffleArtifactAdapter,
     ContractData,
+    SourceCodes,
+    Sources,
 } from '@0x/sol-tracing-utils';

 // HACK: ProfilerSubprovider is a hacky way to do profiling using coverage tools. Not production ready

@@ -63,7 +63,7 @@ export const profilerHandler: SingleFileSubtraceHandler = (
 ): Coverage => {
     const absoluteFileName = contractData.sources[fileIndex];
     const profilerEntriesDescription = collectCoverageEntries(contractData.sourceCodes[fileIndex]);
-    const gasConsumedByStatement: { [statementId: string]: number } = {};
+    const statementToGasConsumed: { [statementId: string]: number } = {};
     const statementIds = _.keys(profilerEntriesDescription.statementMap);
     for (const statementId of statementIds) {
         const statementDescription = profilerEntriesDescription.statementMap[statementId];

@@ -83,14 +83,14 @@ export const profilerHandler: SingleFileSubtraceHandler = (
                 }
             }),
         );
-        gasConsumedByStatement[statementId] = totalGasCost;
+        statementToGasConsumed[statementId] = totalGasCost;
     }
     const partialProfilerOutput = {
         [absoluteFileName]: {
             ...profilerEntriesDescription,
             path: absoluteFileName,
             f: {}, // I's meaningless in profiling context
-            s: gasConsumedByStatement,
+            s: statementToGasConsumed,
             b: {}, // I's meaningless in profiling context
         },
     };

@@ -3,6 +3,8 @@ export {
     TruffleArtifactAdapter,
     SolCompilerArtifactAdapter,
     ContractData,
+    SourceCodes,
+    Sources,
 } from '@0x/sol-tracing-utils';

 export { RevertTraceSubprovider } from './revert_trace_subprovider';

@@ -106,7 +106,8 @@ export class RevertTraceSubprovider extends TraceCollectionSubprovider {
                 continue;
             }

-            const fileIndex = contractData.sources.indexOf(sourceRange.fileName);
+            const fileNameToFileIndex = _.invert(contractData.sources);
+            const fileIndex = _.parseInt(fileNameToFileIndex[sourceRange.fileName]);
             const sourceSnippet = getSourceRangeSnippet(sourceRange, contractData.sourceCodes[fileIndex]);
             if (sourceSnippet !== null) {
                 sourceSnippets.push(sourceSnippet);

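With `sources` now an id-keyed object rather than an array, `indexOf` no longer applies, so the lookup is done by inverting the map. A small illustration with hypothetical values:

    import * as _ from 'lodash';

    // Hypothetical sources map: source id -> absolute file name.
    const sources = { 0: '/project/contracts/A.sol', 1: '/project/contracts/B.sol' };
    const fileNameToFileIndex = _.invert(sources); // { '/project/contracts/A.sol': '0', ... }
    const fileIndex = _.parseInt(fileNameToFileIndex['/project/contracts/B.sol']); // 1
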
@@ -1,4 +1,29 @@
 [
+    {
+        "version": "4.0.0",
+        "changes": [
+            {
+                "note": "Fix a bug with incorrect parsing of `sourceMaps` due to sources being in an array instead of a map",
+                "pr": 1498
+            },
+            {
+                "note": "Change the types of `ContractData.sources` and `ContractData.sourceCodes` to be objects instead of arrays",
+                "pr": 1498
+            },
+            {
+                "note": "Use custom JS tracer to speed up tracing on clients that support it (e.g., Geth)",
+                "pr": 1498
+            },
+            {
+                "note": "Log errors encountered in `TraceCollectionSubprovider`",
+                "pr": 1498
+            },
+            {
+                "note": "Add support for assembly statements",
+                "pr": 1498
+            }
+        ]
+    },
     {
         "version": "3.0.0",
         "changes": [

@@ -5,7 +5,7 @@ import * as glob from 'glob';
 import * as _ from 'lodash';
 import * as path from 'path';

-import { ContractData } from '../types';
+import { ContractData, SourceCodes, Sources } from '../types';

 import { AbstractArtifactAdapter } from './abstract_artifact_adapter';

@@ -43,9 +43,14 @@ export class SolCompilerArtifactAdapter extends AbstractArtifactAdapter {
                 logUtils.warn(`${artifactFileName} doesn't contain bytecode. Skipping...`);
                 continue;
             }
-            let sources = _.keys(artifact.sources);
-            sources = _.map(sources, relativeFilePath => path.resolve(this._sourcesPath, relativeFilePath));
-            const sourceCodes = _.map(sources, (source: string) => fs.readFileSync(source).toString());
+            const sources: Sources = {};
+            const sourceCodes: SourceCodes = {};
+            _.map(artifact.sources, (value: { id: number }, relativeFilePath: string) => {
+                const filePath = path.resolve(this._sourcesPath, relativeFilePath);
+                const fileContent = fs.readFileSync(filePath).toString();
+                sources[value.id] = filePath;
+                sourceCodes[value.id] = fileContent;
+            });
             const contractData = {
                 sourceCodes,
                 sources,

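The adapter now keys both maps by the compiler-assigned source id rather than by array position, so the ids referenced from source maps always line up. Roughly, for a hypothetical artifact:

    // Hypothetical artifact.sources as emitted by solc:
    //   { 'contracts/A.sol': { id: 0 }, 'contracts/lib/B.sol': { id: 1 } }
    // After the loop above:
    //   sources     == { 0: '<sourcesPath>/contracts/A.sol', 1: '<sourcesPath>/contracts/lib/B.sol' }
    //   sourceCodes == { 0: '<contents of A.sol>', 1: '<contents of B.sol>' }
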
@@ -1,7 +1,7 @@
 import * as _ from 'lodash';
 import * as Parser from 'solidity-parser-antlr';

-import { BranchMap, FnMap, LocationByOffset, SingleFileSourceRange, StatementMap } from './types';
+import { BranchMap, FnMap, OffsetToLocation, SingleFileSourceRange, StatementMap } from './types';

 export interface CoverageEntriesDescription {
     fnMap: FnMap;

@@ -22,13 +22,13 @@ export class ASTVisitor {
     private readonly _branchMap: BranchMap = {};
     private readonly _modifiersStatementIds: number[] = [];
     private readonly _statementMap: StatementMap = {};
-    private readonly _locationByOffset: LocationByOffset;
+    private readonly _offsetToLocation: OffsetToLocation;
     private readonly _ignoreRangesBeginningAt: number[];
     // keep track of contract/function ranges that are to be ignored
     // so we can also ignore any children nodes within the contract/function
     private readonly _ignoreRangesWithin: Array<[number, number]> = [];
-    constructor(locationByOffset: LocationByOffset, ignoreRangesBeginningAt: number[] = []) {
-        this._locationByOffset = locationByOffset;
+    constructor(offsetToLocation: OffsetToLocation, ignoreRangesBeginningAt: number[] = []) {
+        this._offsetToLocation = offsetToLocation;
         this._ignoreRangesBeginningAt = ignoreRangesBeginningAt;
     }
     public getCollectedCoverageEntries(): CoverageEntriesDescription {

@@ -94,6 +94,39 @@ export class ASTVisitor {
     public InlineAssemblyStatement(ast: Parser.InlineAssemblyStatement): void {
         this._visitStatement(ast);
     }
+    public AssemblyLocalDefinition(ast: Parser.AssemblyLocalDefinition): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyCall(ast: Parser.AssemblyCall): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyIf(ast: Parser.AssemblyIf): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyBlock(ast: Parser.AssemblyBlock): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyExpression(ast: Parser.AssemblyExpression): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyAssignment(ast: Parser.AssemblyAssignment): void {
+        this._visitStatement(ast);
+    }
+    public LabelDefinition(ast: Parser.LabelDefinition): void {
+        this._visitStatement(ast);
+    }
+    public AssemblySwitch(ast: Parser.AssemblySwitch): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyFunctionDefinition(ast: Parser.AssemblyFunctionDefinition): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyFor(ast: Parser.AssemblyFor): void {
+        this._visitStatement(ast);
+    }
+    public SubAssembly(ast: Parser.SubAssembly): void {
+        this._visitStatement(ast);
+    }
     public BinaryOperation(ast: Parser.BinaryOperation): void {
         const BRANCHING_BIN_OPS = ['&&', '||'];
         if (_.includes(BRANCHING_BIN_OPS, ast.operator)) {

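solidity-parser-antlr dispatches to visitor methods by AST node-type name, which is why supporting assembly statements amounts to adding one method per node type. A standalone sketch of that dispatch (hypothetical counter, not part of the ASTVisitor above):

    import * as parser from 'solidity-parser-antlr';

    const source = 'contract C { function f() public { assembly { let x := add(1, 2) } } }';
    const ast = parser.parse(source, { range: true });
    let assemblyCallCount = 0;
    parser.visit(ast, {
        AssemblyCall: () => {
            assemblyCallCount++; // invoked once per AssemblyCall node in the tree
        },
    });
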
@@ -136,8 +169,8 @@
     }
     private _getExpressionRange(ast: Parser.ASTNode): SingleFileSourceRange {
         const astRange = ast.range as [number, number];
-        const start = this._locationByOffset[astRange[0]];
-        const end = this._locationByOffset[astRange[1] + 1];
+        const start = this._offsetToLocation[astRange[0]];
+        const end = this._offsetToLocation[astRange[1] + 1];
         const range = {
             start,
             end,

@@ -3,24 +3,24 @@ import * as _ from 'lodash';
 import * as parser from 'solidity-parser-antlr';

 import { ASTVisitor, CoverageEntriesDescription } from './ast_visitor';
-import { getLocationByOffset } from './source_maps';
+import { getOffsetToLocation } from './source_maps';

 const IGNORE_RE = /\/\*\s*solcov\s+ignore\s+next\s*\*\/\s*/gm;

 // Parsing source code for each transaction/code is slow and therefore we cache it
-const coverageEntriesBySourceHash: { [sourceHash: string]: CoverageEntriesDescription } = {};
+const sourceHashToCoverageEntries: { [sourceHash: string]: CoverageEntriesDescription } = {};

 export const collectCoverageEntries = (contractSource: string) => {
     const sourceHash = ethUtil.sha3(contractSource).toString('hex');
-    if (_.isUndefined(coverageEntriesBySourceHash[sourceHash]) && !_.isUndefined(contractSource)) {
+    if (_.isUndefined(sourceHashToCoverageEntries[sourceHash]) && !_.isUndefined(contractSource)) {
         const ast = parser.parse(contractSource, { range: true });
-        const locationByOffset = getLocationByOffset(contractSource);
+        const offsetToLocation = getOffsetToLocation(contractSource);
         const ignoreRangesBegingingAt = gatherRangesToIgnore(contractSource);
-        const visitor = new ASTVisitor(locationByOffset, ignoreRangesBegingingAt);
+        const visitor = new ASTVisitor(offsetToLocation, ignoreRangesBegingingAt);
         parser.visit(ast, visitor);
-        coverageEntriesBySourceHash[sourceHash] = visitor.getCollectedCoverageEntries();
+        sourceHashToCoverageEntries[sourceHash] = visitor.getCollectedCoverageEntries();
     }
-    const coverageEntriesDescription = coverageEntriesBySourceHash[sourceHash];
+    const coverageEntriesDescription = sourceHashToCoverageEntries[sourceHash];
     return coverageEntriesDescription;
 };

@@ -13,7 +13,7 @@ interface ASTInfo {
 }

 // Parsing source code for each transaction/code is slow and therefore we cache it
-const parsedSourceByHash: { [sourceHash: string]: Parser.ASTNode } = {};
+const hashToParsedSource: { [sourceHash: string]: Parser.ASTNode } = {};

 /**
  * Gets the source range snippet by source range to be used by revert trace.

@@ -22,10 +22,10 @@ const parsedSourceByHash: { [sourceHash: string]: Parser.ASTNode } = {};
  */
 export function getSourceRangeSnippet(sourceRange: SourceRange, sourceCode: string): SourceSnippet | null {
     const sourceHash = ethUtil.sha3(sourceCode).toString('hex');
-    if (_.isUndefined(parsedSourceByHash[sourceHash])) {
-        parsedSourceByHash[sourceHash] = Parser.parse(sourceCode, { loc: true });
+    if (_.isUndefined(hashToParsedSource[sourceHash])) {
+        hashToParsedSource[sourceHash] = Parser.parse(sourceCode, { loc: true });
     }
-    const astNode = parsedSourceByHash[sourceHash];
+    const astNode = hashToParsedSource[sourceHash];
     const visitor = new ASTInfoVisitor();
     Parser.visit(astNode, visitor);
     const astInfo = visitor.getASTInfoForRange(sourceRange);

@@ -22,11 +22,13 @@ export {
     BranchMap,
     EvmCallStackEntry,
     FnMap,
-    LocationByOffset,
+    OffsetToLocation,
     StatementMap,
     TraceInfoBase,
     TraceInfoExistingContract,
     TraceInfoNewContract,
+    Sources,
+    SourceCodes,
 } from './types';
 export { collectCoverageEntries } from './collect_coverage_entries';
 export { TraceCollector, SingleFileSubtraceHandler } from './trace_collector';

@@ -1,7 +1,7 @@
 import * as _ from 'lodash';

 import { getPcToInstructionIndexMapping } from './instructions';
-import { LocationByOffset, SourceRange } from './types';
+import { OffsetToLocation, SourceCodes, SourceRange, Sources } from './types';

 const RADIX = 10;

@@ -15,38 +15,41 @@ export interface SourceLocation {
  * Receives a string with newlines and returns a map of byte offset to LineColumn
  * @param str A string to process
  */
-export function getLocationByOffset(str: string): LocationByOffset {
-    const locationByOffset: LocationByOffset = { 0: { line: 1, column: 0 } };
+export function getOffsetToLocation(str: string): OffsetToLocation {
+    const offsetToLocation: OffsetToLocation = { 0: { line: 1, column: 0 } };
     let currentOffset = 0;
     for (const char of str.split('')) {
-        const location = locationByOffset[currentOffset];
+        const location = offsetToLocation[currentOffset];
         const isNewline = char === '\n';
-        locationByOffset[currentOffset + 1] = {
+        offsetToLocation[currentOffset + 1] = {
             line: location.line + (isNewline ? 1 : 0),
             column: isNewline ? 0 : location.column + 1,
         };
         currentOffset++;
     }
-    return locationByOffset;
+    return offsetToLocation;
 }

 /**
  * Parses a sourcemap string.
  * The solidity sourcemap format is documented here: https://github.com/ethereum/solidity/blob/develop/docs/miscellaneous.rst#source-mappings
- * @param sourceCodes sources contents
+ * @param indexToSourceCode index to source code
  * @param srcMap source map string
  * @param bytecodeHex contract bytecode
- * @param sources sources file names
+ * @param indexToSource index to source file path
  */
 export function parseSourceMap(
-    sourceCodes: string[],
+    sourceCodes: SourceCodes,
     srcMap: string,
     bytecodeHex: string,
-    sources: string[],
+    sources: Sources,
 ): { [programCounter: number]: SourceRange } {
     const bytecode = Uint8Array.from(Buffer.from(bytecodeHex, 'hex'));
     const pcToInstructionIndex: { [programCounter: number]: number } = getPcToInstructionIndexMapping(bytecode);
-    const locationByOffsetByFileIndex = _.map(sourceCodes, s => (_.isUndefined(s) ? {} : getLocationByOffset(s)));
+    const fileIndexToOffsetToLocation: { [fileIndex: number]: OffsetToLocation } = {};
+    _.map(sourceCodes, (sourceCode: string, fileIndex: number) => {
+        fileIndexToOffsetToLocation[fileIndex] = _.isUndefined(sourceCode) ? {} : getOffsetToLocation(sourceCode);
+    });
     const entries = srcMap.split(';');
     let lastParsedEntry: SourceLocation = {} as any;
     const instructionIndexToSourceRange: { [instructionIndex: number]: SourceRange } = {};

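For reference, each `;`-separated sourcemap entry has the form `offset:length:fileIndex[:jump]`, with empty fields inheriting the previous entry's values; `getOffsetToLocation` builds the byte-offset table that turns those offsets into line/column ranges. A small sketch using the function above:

    // Sketch: resolve a hypothetical entry '13:1:0' (offset 13, length 1, file 0).
    const source = 'contract A {\n}\n';
    const offsetToLocation = getOffsetToLocation(source);
    const start = offsetToLocation[13]; // { line: 2, column: 0 }
    const end = offsetToLocation[13 + 1]; // { line: 2, column: 1 }
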
@@ -66,14 +69,18 @@ export function parseSourceMap(
             length,
             fileIndex,
         };
-        if (parsedEntry.fileIndex !== -1 && !_.isUndefined(locationByOffsetByFileIndex[parsedEntry.fileIndex])) {
+        if (parsedEntry.fileIndex !== -1 && !_.isUndefined(fileIndexToOffsetToLocation[parsedEntry.fileIndex])) {
+            const offsetToLocation = fileIndexToOffsetToLocation[parsedEntry.fileIndex];
             const sourceRange = {
                 location: {
-                    start: locationByOffsetByFileIndex[parsedEntry.fileIndex][parsedEntry.offset],
-                    end: locationByOffsetByFileIndex[parsedEntry.fileIndex][parsedEntry.offset + parsedEntry.length],
+                    start: offsetToLocation[parsedEntry.offset],
+                    end: offsetToLocation[parsedEntry.offset + parsedEntry.length],
                 },
                 fileName: sources[parsedEntry.fileIndex],
             };
+            if (sourceRange.location.start === undefined || sourceRange.location.end === undefined) {
+                throw new Error(`Error while processing sourcemap: location out of range in ${sourceRange.fileName}`);
+            }
             instructionIndexToSourceRange[i] = sourceRange;
         } else {
             // Some assembly code generated by Solidity can't be mapped back to a line of source code.

@@ -4,21 +4,21 @@ import * as _ from 'lodash';

 import { utils } from './utils';

-export interface TraceByContractAddress {
+export interface ContractAddressToTraces {
     [contractAddress: string]: StructLog[];
 }

 /**
- * Converts linear stack trace to `TraceByContractAddress`.
+ * Converts linear stack trace to `ContractAddressToTraces`.
  * @param structLogs stack trace
  * @param startAddress initial context address
  */
-export function getTracesByContractAddress(structLogs: StructLog[], startAddress: string): TraceByContractAddress {
-    const traceByContractAddress: TraceByContractAddress = {};
+export function getContractAddressToTraces(structLogs: StructLog[], startAddress: string): ContractAddressToTraces {
+    const contractAddressToTraces: ContractAddressToTraces = {};
     let currentTraceSegment = [];
     const addressStack = [startAddress];
     if (_.isEmpty(structLogs)) {
-        return traceByContractAddress;
+        return contractAddressToTraces;
     }
     const normalizedStructLogs = utils.normalizeStructLogs(structLogs);
     // tslint:disable-next-line:prefer-for-of

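The renamed type reads as what it is: a map from contract address to the slice of the trace executed in that contract's context. A hypothetical result for a transaction that makes one subcall:

    // getContractAddressToTraces(structLogs, '0x...01') might yield:
    // {
    //     '0x...01': [structLog0, structLog2], // caller frames before and after the CALL
    //     '0x...02': [structLog1],             // frames executed inside the callee
    // }
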
@@ -45,14 +45,14 @@ export function getTracesByContractAddress(structLogs: StructLog[], startAddress
             const nextStructLog = normalizedStructLogs[i + 1];
             if (nextStructLog.depth !== structLog.depth) {
                 addressStack.push(newAddress);
-                traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+                contractAddressToTraces[currentAddress] = (contractAddressToTraces[currentAddress] || []).concat(
                     currentTraceSegment,
                 );
                 currentTraceSegment = [];
             }
         } else if (utils.isEndOpcode(structLog.op)) {
             const currentAddress = addressStack.pop() as string;
-            traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+            contractAddressToTraces[currentAddress] = (contractAddressToTraces[currentAddress] || []).concat(
                 currentTraceSegment,
             );
             currentTraceSegment = [];

@@ -71,7 +71,7 @@ export function getTracesByContractAddress(structLogs: StructLog[], startAddress
                 logUtils.warn(
                     "Detected a contract created from within another contract. We currently do not support that scenario. We'll just skip that trace",
                 );
-                return traceByContractAddress;
+                return contractAddressToTraces;
             } else {
                 if (structLog !== _.last(normalizedStructLogs)) {
                     const nextStructLog = normalizedStructLogs[i + 1];

@@ -79,7 +79,7 @@ export function getTracesByContractAddress(structLogs: StructLog[], startAddress
                     continue;
                 } else if (nextStructLog.depth === structLog.depth - 1) {
                     const currentAddress = addressStack.pop() as string;
-                    traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+                    contractAddressToTraces[currentAddress] = (contractAddressToTraces[currentAddress] || []).concat(
                         currentTraceSegment,
                     );
                     currentTraceSegment = [];

@@ -94,11 +94,11 @@ export function getTracesByContractAddress(structLogs: StructLog[], startAddress
     }
     if (currentTraceSegment.length !== 0) {
         const currentAddress = addressStack.pop() as string;
-        traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+        contractAddressToTraces[currentAddress] = (contractAddressToTraces[currentAddress] || []).concat(
            currentTraceSegment,
         );
         currentTraceSegment = [];
         logUtils.warn('Malformed trace. Current trace segment non empty at the end');
     }
-    return traceByContractAddress;
+    return contractAddressToTraces;
 }

@@ -1,5 +1,6 @@
 import { BlockchainLifecycle } from '@0x/dev-utils';
 import { Callback, ErrorCallback, NextCallback, Subprovider } from '@0x/subproviders';
+import { logUtils } from '@0x/utils';
 import { CallDataRPC, marshaller, Web3Wrapper } from '@0x/web3-wrapper';
 import { JSONRPCRequestPayload, Provider, TxData } from 'ethereum-types';
 import * as _ from 'lodash';

@@ -20,6 +21,23 @@ export interface TraceCollectionSubproviderConfig {
     shouldCollectGasEstimateTraces: boolean;
 }

+type AsyncFunc = (...args: any[]) => Promise<void>;
+
+// HACK: This wrapper outputs errors to console even if the promise gets ignored
+// we need this because web3-provider-engine does not handle promises in
+// the after function of next(after).
+function logAsyncErrors(fn: AsyncFunc): AsyncFunc {
+    async function wrappedAsync(...args: any[]): Promise<void> {
+        try {
+            await fn(...args);
+        } catch (err) {
+            logUtils.log(err);
+            throw err;
+        }
+    }
+    return wrappedAsync;
+}
+
 // Because there is no notion of a call trace in the Ethereum rpc - we collect them in a rather non-obvious/hacky way.
 // On each call - we create a snapshot, execute the call as a transaction, get the trace, revert the snapshot.
 // That allows us to avoid influencing test behaviour.

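The callback handed to `next(after)` is fired by web3-provider-engine without being awaited, so a rejection would otherwise disappear; the wrapper logs before rethrowing. A usage sketch (with a hypothetical async handler):

    // Sketch: wrap a fire-and-forget async callback so rejections get logged.
    const onMined = logAsyncErrors(async (txHash: string) => {
        await handleTraceAsync(txHash); // hypothetical handler that may reject
    });
    onMined('0x...'); // even if this returned promise is ignored, the error is logged
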
@@ -74,7 +92,7 @@ export abstract class TraceCollectionSubprovider extends Subprovider {
                 next();
             } else {
                 const txData = payload.params[0];
-                next(this._onTransactionSentAsync.bind(this, txData));
+                next(logAsyncErrors(this._onTransactionSentAsync.bind(this, txData)));
             }
             return;

@@ -83,7 +101,7 @@ export abstract class TraceCollectionSubprovider extends Subprovider {
                 next();
             } else {
                 const callData = payload.params[0];
-                next(this._onCallOrGasEstimateExecutedAsync.bind(this, callData));
+                next(logAsyncErrors(this._onCallOrGasEstimateExecutedAsync.bind(this, callData)));
             }
             return;

@@ -92,7 +110,7 @@ export abstract class TraceCollectionSubprovider extends Subprovider {
                 next();
             } else {
                 const estimateGasData = payload.params[0];
-                next(this._onCallOrGasEstimateExecutedAsync.bind(this, estimateGasData));
+                next(logAsyncErrors(this._onCallOrGasEstimateExecutedAsync.bind(this, estimateGasData)));
             }
             return;

@@ -56,7 +56,7 @@ export class TraceCollector {
         this._singleFileSubtraceHandler = singleFileSubtraceHandler;
     }
     public async writeOutputAsync(): Promise<void> {
-        const finalCoverage = this._collector.getFinalCoverage();
+        const finalCoverage: Coverage = this._collector.getFinalCoverage();
         const stringifiedCoverage = JSON.stringify(finalCoverage, null, '\t');
         await mkdirpAsync('coverage');
         fs.writeFileSync('coverage/coverage.json', stringifiedCoverage);

@@ -80,14 +80,14 @@ export class TraceCollector {
         const bytecodeHex = stripHexPrefix(bytecode);
         const sourceMap = isContractCreation ? contractData.sourceMap : contractData.sourceMapRuntime;
         const pcToSourceRange = parseSourceMap(contractData.sourceCodes, sourceMap, bytecodeHex, contractData.sources);
-        for (let fileIndex = 0; fileIndex < contractData.sources.length; fileIndex++) {
+        _.map(contractData.sources, (_sourcePath: string, fileIndex: string) => {
             const singleFileCoverageForTrace = this._singleFileSubtraceHandler(
                 contractData,
                 traceInfo.subtrace,
                 pcToSourceRange,
-                fileIndex,
+                _.parseInt(fileIndex),
             );
             this._collector.add(singleFileCoverageForTrace);
-        }
+        });
     }
 }

@@ -1,7 +1,8 @@
+import { NodeType } from '@0x/web3-wrapper';
 import * as _ from 'lodash';

 import { constants } from './constants';
-import { getTracesByContractAddress } from './trace';
+import { getContractAddressToTraces } from './trace';
 import { TraceCollectionSubprovider } from './trace_collection_subprovider';
 import { TraceInfo, TraceInfoExistingContract, TraceInfoNewContract } from './types';

@@ -12,18 +13,50 @@ export abstract class TraceInfoSubprovider extends TraceCollectionSubprovider {
     protected abstract _handleTraceInfoAsync(traceInfo: TraceInfo): Promise<void>;
     protected async _recordTxTraceAsync(address: string, data: string | undefined, txHash: string): Promise<void> {
         await this._web3Wrapper.awaitTransactionMinedAsync(txHash, 0);
-        const trace = await this._web3Wrapper.getTransactionTraceAsync(txHash, {
-            disableMemory: true,
-            disableStack: false,
-            disableStorage: true,
-        });
-        const tracesByContractAddress = getTracesByContractAddress(trace.structLogs, address);
-        const subcallAddresses = _.keys(tracesByContractAddress);
+        const nodeType = await this._web3Wrapper.getNodeTypeAsync();
+        let trace;
+        if (nodeType === NodeType.Geth) {
+            // For very large traces we use a custom tracer that outputs a format compatible with a
+            // regular trace. We only need the 2nd item on the stack when the instruction is a call.
+            // By not including other stack values, we drastically limit the amount of data to be collected.
+            // There are no good docs about how to write those tracers, but you can find some example ones here:
+            // https://github.com/ethereum/go-ethereum/tree/master/eth/tracers/internal/tracers
+            const tracer = `
+                {
+                    data: [],
+                    step: function(log) {
+                        const op = log.op.toString();
+                        const opn = 0 | log.op.toNumber();
+                        const pc = 0 | log.getPC();
+                        const depth = 0 | log.getDepth();
+                        const gasCost = 0 | log.getCost();
+                        const gas = 0 | log.getGas();
+                        const isCall = opn == 0xf1 || opn == 0xf2 || opn == 0xf4 || opn == 0xf5;
+                        const stack = isCall ? ['0x'+log.stack.peek(1).toString(16), null] : null;
+                        this.data.push({ pc, gasCost, depth, op, stack, gas });
+                    },
+                    fault: function() { },
+                    result: function() { return {structLogs: this.data}; }
+                }
+            `;
+            trace = await this._web3Wrapper.getTransactionTraceAsync(txHash, { tracer, timeout: '600s' });
+        } else {
+            /**
+             * Ganache doesn't support custom tracers yet.
+             */
+            trace = await this._web3Wrapper.getTransactionTraceAsync(txHash, {
+                disableMemory: true,
+                disableStack: false,
+                disableStorage: true,
+            });
+        }
+        const contractAddressToTraces = getContractAddressToTraces(trace.structLogs, address);
+        const subcallAddresses = _.keys(contractAddressToTraces);
         if (address === constants.NEW_CONTRACT) {
             for (const subcallAddress of subcallAddresses) {
                 let traceInfo: TraceInfoNewContract | TraceInfoExistingContract;
                 if (subcallAddress === 'NEW_CONTRACT') {
-                    const traceForThatSubcall = tracesByContractAddress[subcallAddress];
+                    const traceForThatSubcall = contractAddressToTraces[subcallAddress];
                     traceInfo = {
                         subtrace: traceForThatSubcall,
                         txHash,

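The tracer string is plain JavaScript evaluated inside Geth's `debug_traceTransaction`: the node calls `step` once per opcode, `fault` on errors, and `result` once at the end, so only the few fields the coverage code needs ever cross the RPC boundary. Exercised over raw JSON-RPC it would look roughly like this (hypothetical transport and `txHash`; `tracer` is the template string above):

    const payload = {
        jsonrpc: '2.0',
        id: 1,
        method: 'debug_traceTransaction',
        params: [txHash, { tracer, timeout: '600s' }],
    };
    const response = await sendAsync(payload); // hypothetical JSON-RPC helper
    const structLogs = response.result.structLogs; // same shape as a regular trace
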
@@ -32,7 +65,7 @@ export abstract class TraceInfoSubprovider extends TraceCollectionSubprovider {
                     };
                 } else {
                     const runtimeBytecode = await this._web3Wrapper.getContractCodeAsync(subcallAddress);
-                    const traceForThatSubcall = tracesByContractAddress[subcallAddress];
+                    const traceForThatSubcall = contractAddressToTraces[subcallAddress];
                     traceInfo = {
                         subtrace: traceForThatSubcall,
                         txHash,

@@ -45,7 +78,7 @@ export abstract class TraceInfoSubprovider extends TraceCollectionSubprovider {
         } else {
             for (const subcallAddress of subcallAddresses) {
                 const runtimeBytecode = await this._web3Wrapper.getContractCodeAsync(subcallAddress);
-                const traceForThatSubcall = tracesByContractAddress[subcallAddress];
+                const traceForThatSubcall = contractAddressToTraces[subcallAddress];
                 const traceInfo: TraceInfoExistingContract = {
                     subtrace: traceForThatSubcall,
                     txHash,

@@ -16,7 +16,7 @@ export interface SingleFileSourceRange {
     end: LineColumn;
 }

-export interface LocationByOffset {
+export interface OffsetToLocation {
     [offset: number]: LineColumn;
 }

@@ -76,13 +76,20 @@ export interface Coverage {
     };
 }

+export interface SourceCodes {
+    [sourceId: number]: string;
+}
+export interface Sources {
+    [sourceId: number]: string;
+}
+
 export interface ContractData {
     bytecode: string;
     sourceMap: string;
     runtimeBytecode: string;
     sourceMapRuntime: string;
-    sourceCodes: string[];
-    sources: string[];
+    sourceCodes: SourceCodes;
+    sources: Sources;
 }

 // Part of the trace executed within the same context

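Switching from `string[]` to id-keyed objects means compiler-assigned source ids no longer have to be dense or zero-based. A before/after sketch with hypothetical values:

    // Before: position in the array implied the source id.
    //   const sources: string[] = ['/abs/A.sol', '/abs/B.sol'];
    // After: the id is explicit, so gaps are fine.
    const sources: Sources = { 0: '/abs/A.sol', 2: '/abs/B.sol' };
    const sourceCodes: SourceCodes = { 0: 'contract A {}', 2: 'contract B {}' };
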
@@ -23,6 +23,12 @@ export const utils = {
             utils.compareLineColumn(childRange.end, parentRange.end) <= 0
         );
     },
+    isRangeEqual(childRange: SingleFileSourceRange, parentRange: SingleFileSourceRange): boolean {
+        return (
+            utils.compareLineColumn(parentRange.start, childRange.start) === 0 &&
+            utils.compareLineColumn(childRange.end, parentRange.end) === 0
+        );
+    },
     bytecodeToBytecodeRegex(bytecode: string): string {
         const bytecodeRegex = bytecode
             // Library linking placeholder: __ConvertLib____________________________

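`isRangeEqual` complements `isRangeInside` with an exact match on both endpoints. A quick usage sketch:

    const a = { start: { line: 1, column: 0 }, end: { line: 1, column: 10 } };
    const b = { start: { line: 1, column: 0 }, end: { line: 1, column: 10 } };
    utils.isRangeEqual(a, b); // true
    utils.isRangeInside(a, b); // also true: equal ranges are mutually inside
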
@@ -4,7 +4,7 @@ import * as _ from 'lodash';
 import 'mocha';
 import * as path from 'path';

-import { getLocationByOffset, parseSourceMap } from '../src/source_maps';
+import { getOffsetToLocation, parseSourceMap } from '../src/source_maps';

 const expect = chai.expect;

@@ -15,7 +15,7 @@ const simplestContract = fs.readFileSync(simplestContractFileName).toString();
 describe('source maps', () => {
     describe('#getLocationByOffset', () => {
         it('correctly computes location by offset', () => {
-            const locationByOffset = getLocationByOffset(simplestContract);
+            const offsetToLocation = getOffsetToLocation(simplestContract);
             const expectedLocationByOffset = {
                 '0': { line: 1, column: 0 },
                 '1': { line: 1, column: 1 },

@@ -41,7 +41,7 @@ describe('source maps', () => {
                 '21': { line: 2, column: 1 },
                 '22': { line: 3, column: 0 },
             };
-            expect(locationByOffset).to.be.deep.equal(expectedLocationByOffset);
+            expect(offsetToLocation).to.be.deep.equal(expectedLocationByOffset);
         });
     });
     describe('#parseSourceMap', () => {

@@ -3,7 +3,7 @@ import { OpCode, StructLog } from 'ethereum-types';
 import * as _ from 'lodash';
 import 'mocha';

-import { getTracesByContractAddress } from '../src/trace';
+import { getContractAddressToTraces } from '../src/trace';

 const expect = chai.expect;

@@ -44,7 +44,7 @@ describe('Trace', () => {
         ];
         const fullTrace = _.map(trace, compactStructLog => addDefaultStructLogFields(compactStructLog));
         const startAddress = '0x0000000000000000000000000000000000000001';
-        const traceByContractAddress = getTracesByContractAddress(fullTrace, startAddress);
+        const traceByContractAddress = getContractAddressToTraces(fullTrace, startAddress);
         const expectedTraceByContractAddress = {
             [startAddress]: [fullTrace[0], fullTrace[2]],
             [delegateCallAddress]: [fullTrace[1]],