Merge pull request #1498 from 0xProject/fix/sol-cov

Sol tracing fixes
Leonid Logvinov
2019-01-15 14:45:02 +01:00
committed by GitHub
23 changed files with 226 additions and 81 deletions

View File

@@ -8,7 +8,7 @@
"eip158Block": 3, "eip158Block": 3,
"byzantiumBlock": 4, "byzantiumBlock": 4,
"clique": { "clique": {
"period": 0, "period": 1,
"epoch": 30000 "epoch": 30000
} }
}, },

View File

@@ -306,6 +306,8 @@ export interface TraceParams {
     disableMemory?: boolean;
     disableStack?: boolean;
     disableStorage?: boolean;
+    tracer?: string;
+    timeout?: string;
 }
 export type OutputField =
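
Usage sketch for the two new optional fields, which are forwarded to the node's `debug_traceTransaction` RPC (hedged: `web3Wrapper`, `txHash`, and `customTracerSource` are hypothetical placeholders; the call signature matches the Geth branch in trace_info_subprovider later in this diff):

    // Pass a custom JS tracer plus a generous RPC timeout.
    const trace = await web3Wrapper.getTransactionTraceAsync(txHash, {
        tracer: customTracerSource, // source string of a JS tracer
        timeout: '600s',
    });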

View File

@@ -74,19 +74,19 @@ export const coverageHandler: SingleFileSubtraceHandler = (
     let sourceRanges = _.map(subtrace, structLog => pcToSourceRange[structLog.pc]);
     sourceRanges = _.compact(sourceRanges); // Some PC's don't map to a source range and we just ignore them.
-    // By default lodash does a shallow object comparasion. We JSON.stringify them and compare as strings.
+    // By default lodash does a shallow object comparison. We JSON.stringify them and compare as strings.
     sourceRanges = _.uniqBy(sourceRanges, s => JSON.stringify(s)); // We don't care if one PC was covered multiple times within a single transaction
     sourceRanges = _.filter(sourceRanges, sourceRange => sourceRange.fileName === absoluteFileName);
     const branchCoverage: BranchCoverage = {};
     const branchIds = _.keys(coverageEntriesDescription.branchMap);
     for (const branchId of branchIds) {
         const branchDescription = coverageEntriesDescription.branchMap[branchId];
-        const isBranchCoveredByBranchIndex = _.map(branchDescription.locations, location => {
+        const branchIndexToIsBranchCovered = _.map(branchDescription.locations, location => {
             const isBranchCovered = _.some(sourceRanges, range => utils.isRangeInside(range.location, location));
             const timesBranchCovered = Number(isBranchCovered);
             return timesBranchCovered;
         });
-        branchCoverage[branchId] = isBranchCoveredByBranchIndex;
+        branchCoverage[branchId] = branchIndexToIsBranchCovered;
     }
     const statementCoverage: StatementCoverage = {};
     const statementIds = _.keys(coverageEntriesDescription.statementMap);
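
For orientation, the loop above emits Istanbul-style branch data: one 0/1 entry per branch location. A hypothetical result for a single `if`/`else` branch (not taken from the PR's fixtures):

    const branchCoverage: BranchCoverage = {
        '1': [1, 0], // then-arm covered by this trace, else-arm not
    };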

View File

@@ -4,6 +4,8 @@ export {
     TruffleArtifactAdapter,
     AbstractArtifactAdapter,
     ContractData,
+    SourceCodes,
+    Sources,
 } from '@0x/sol-tracing-utils';
 export {

View File

@@ -3,6 +3,8 @@ export {
     SolCompilerArtifactAdapter,
     TruffleArtifactAdapter,
     ContractData,
+    SourceCodes,
+    Sources,
 } from '@0x/sol-tracing-utils';
 // HACK: ProfilerSubprovider is a hacky way to do profiling using coverage tools. Not production ready

View File

@@ -63,7 +63,7 @@ export const profilerHandler: SingleFileSubtraceHandler = (
 ): Coverage => {
     const absoluteFileName = contractData.sources[fileIndex];
     const profilerEntriesDescription = collectCoverageEntries(contractData.sourceCodes[fileIndex]);
-    const gasConsumedByStatement: { [statementId: string]: number } = {};
+    const statementToGasConsumed: { [statementId: string]: number } = {};
     const statementIds = _.keys(profilerEntriesDescription.statementMap);
     for (const statementId of statementIds) {
         const statementDescription = profilerEntriesDescription.statementMap[statementId];
@@ -83,14 +83,14 @@ export const profilerHandler: SingleFileSubtraceHandler = (
                 }
             }),
         );
-        gasConsumedByStatement[statementId] = totalGasCost;
+        statementToGasConsumed[statementId] = totalGasCost;
     }
     const partialProfilerOutput = {
         [absoluteFileName]: {
             ...profilerEntriesDescription,
             path: absoluteFileName,
             f: {}, // It's meaningless in profiling context
-            s: gasConsumedByStatement,
+            s: statementToGasConsumed,
             b: {}, // It's meaningless in profiling context
         },
     };
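
Shape sketch of the object assembled above (values and paths hypothetical): the profiler reuses the coverage output format but repurposes `s` to carry gas consumed per statement, which is why `f` and `b` stay empty:

    const partialProfilerOutput = {
        '/project/contracts/Exchange.sol': {
            // ...fnMap/statementMap/branchMap from collectCoverageEntries...
            path: '/project/contracts/Exchange.sol',
            f: {},
            s: { '0': 21000, '1': 3400 }, // gas consumed, not hit counts
            b: {},
        },
    };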

View File

@@ -3,6 +3,8 @@ export {
     TruffleArtifactAdapter,
     SolCompilerArtifactAdapter,
     ContractData,
+    SourceCodes,
+    Sources,
 } from '@0x/sol-tracing-utils';
 export { RevertTraceSubprovider } from './revert_trace_subprovider';

View File

@@ -106,7 +106,8 @@ export class RevertTraceSubprovider extends TraceCollectionSubprovider {
                 continue;
             }
-            const fileIndex = contractData.sources.indexOf(sourceRange.fileName);
+            const fileNameToFileIndex = _.invert(contractData.sources);
+            const fileIndex = _.parseInt(fileNameToFileIndex[sourceRange.fileName]);
             const sourceSnippet = getSourceRangeSnippet(sourceRange, contractData.sourceCodes[fileIndex]);
             if (sourceSnippet !== null) {
                 sourceSnippets.push(sourceSnippet);
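
Why `_.invert` here: `contractData.sources` is now an id-keyed object rather than an array, so `Array#indexOf` no longer applies. With hypothetical data:

    const sources = { 0: '/project/A.sol', 5: '/project/B.sol' };
    const fileNameToFileIndex = _.invert(sources); // { '/project/A.sol': '0', '/project/B.sol': '5' }
    const fileIndex = _.parseInt(fileNameToFileIndex['/project/B.sol']); // 5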

View File

@@ -1,4 +1,29 @@
 [
+    {
+        "version": "4.0.0",
+        "changes": [
+            {
+                "note": "Fix a bug with incorrect parsing of `sourceMaps` due to sources being in an array instead of a map",
+                "pr": 1498
+            },
+            {
+                "note": "Change the types of `ContractData.sources` and `ContractData.sourceCodes` to be objects instead of arrays",
+                "pr": 1498
+            },
+            {
+                "note": "Use custom JS tracer to speed up tracing on clients that support it (e.g., Geth)",
+                "pr": 1498
+            },
+            {
+                "note": "Log errors encountered in `TraceCollectionSubprovider`",
+                "pr": 1498
+            },
+            {
+                "note": "Add support for assembly statements",
+                "pr": 1498
+            }
+        ]
+    },
     {
         "version": "3.0.0",
         "changes": [

View File

@@ -5,7 +5,7 @@ import * as glob from 'glob';
 import * as _ from 'lodash';
 import * as path from 'path';
-import { ContractData } from '../types';
+import { ContractData, SourceCodes, Sources } from '../types';
 import { AbstractArtifactAdapter } from './abstract_artifact_adapter';
@@ -43,9 +43,14 @@ export class SolCompilerArtifactAdapter extends AbstractArtifactAdapter {
                 logUtils.warn(`${artifactFileName} doesn't contain bytecode. Skipping...`);
                 continue;
             }
-            let sources = _.keys(artifact.sources);
-            sources = _.map(sources, relativeFilePath => path.resolve(this._sourcesPath, relativeFilePath));
-            const sourceCodes = _.map(sources, (source: string) => fs.readFileSync(source).toString());
+            const sources: Sources = {};
+            const sourceCodes: SourceCodes = {};
+            _.map(artifact.sources, (value: { id: number }, relativeFilePath: string) => {
+                const filePath = path.resolve(this._sourcesPath, relativeFilePath);
+                const fileContent = fs.readFileSync(filePath).toString();
+                sources[value.id] = filePath;
+                sourceCodes[value.id] = fileContent;
+            });
             const contractData = {
                 sourceCodes,
                 sources,
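
The rewritten loop assumes the artifact's `sources` section maps each relative path to its compiler-assigned source id, roughly like this hypothetical excerpt:

    const artifact = {
        sources: {
            'contracts/Exchange.sol': { id: 0 },
            'contracts/libs/LibMath.sol': { id: 1 },
        },
    };
    // After the loop: sources[0] === '/project/contracts/Exchange.sol'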

View File

@@ -1,7 +1,7 @@
 import * as _ from 'lodash';
 import * as Parser from 'solidity-parser-antlr';
-import { BranchMap, FnMap, LocationByOffset, SingleFileSourceRange, StatementMap } from './types';
+import { BranchMap, FnMap, OffsetToLocation, SingleFileSourceRange, StatementMap } from './types';
 export interface CoverageEntriesDescription {
     fnMap: FnMap;
@@ -22,13 +22,13 @@ export class ASTVisitor {
     private readonly _branchMap: BranchMap = {};
     private readonly _modifiersStatementIds: number[] = [];
     private readonly _statementMap: StatementMap = {};
-    private readonly _locationByOffset: LocationByOffset;
+    private readonly _offsetToLocation: OffsetToLocation;
     private readonly _ignoreRangesBeginningAt: number[];
     // keep track of contract/function ranges that are to be ignored
     // so we can also ignore any children nodes within the contract/function
     private readonly _ignoreRangesWithin: Array<[number, number]> = [];
-    constructor(locationByOffset: LocationByOffset, ignoreRangesBeginningAt: number[] = []) {
-        this._locationByOffset = locationByOffset;
+    constructor(offsetToLocation: OffsetToLocation, ignoreRangesBeginningAt: number[] = []) {
+        this._offsetToLocation = offsetToLocation;
         this._ignoreRangesBeginningAt = ignoreRangesBeginningAt;
     }
     public getCollectedCoverageEntries(): CoverageEntriesDescription {
@@ -94,6 +94,39 @@ export class ASTVisitor {
     public InlineAssemblyStatement(ast: Parser.InlineAssemblyStatement): void {
         this._visitStatement(ast);
     }
+    public AssemblyLocalDefinition(ast: Parser.AssemblyLocalDefinition): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyCall(ast: Parser.AssemblyCall): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyIf(ast: Parser.AssemblyIf): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyBlock(ast: Parser.AssemblyBlock): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyExpression(ast: Parser.AssemblyExpression): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyAssignment(ast: Parser.AssemblyAssignment): void {
+        this._visitStatement(ast);
+    }
+    public LabelDefinition(ast: Parser.LabelDefinition): void {
+        this._visitStatement(ast);
+    }
+    public AssemblySwitch(ast: Parser.AssemblySwitch): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyFunctionDefinition(ast: Parser.AssemblyFunctionDefinition): void {
+        this._visitStatement(ast);
+    }
+    public AssemblyFor(ast: Parser.AssemblyFor): void {
+        this._visitStatement(ast);
+    }
+    public SubAssembly(ast: Parser.SubAssembly): void {
+        this._visitStatement(ast);
+    }
     public BinaryOperation(ast: Parser.BinaryOperation): void {
         const BRANCHING_BIN_OPS = ['&&', '||'];
         if (_.includes(BRANCHING_BIN_OPS, ast.operator)) {
@@ -136,8 +169,8 @@ export class ASTVisitor {
     }
     private _getExpressionRange(ast: Parser.ASTNode): SingleFileSourceRange {
         const astRange = ast.range as [number, number];
-        const start = this._locationByOffset[astRange[0]];
-        const end = this._locationByOffset[astRange[1] + 1];
+        const start = this._offsetToLocation[astRange[0]];
+        const end = this._offsetToLocation[astRange[1] + 1];
         const range = {
             start,
             end,

View File

@@ -3,24 +3,24 @@ import * as _ from 'lodash';
 import * as parser from 'solidity-parser-antlr';
 import { ASTVisitor, CoverageEntriesDescription } from './ast_visitor';
-import { getLocationByOffset } from './source_maps';
+import { getOffsetToLocation } from './source_maps';
 const IGNORE_RE = /\/\*\s*solcov\s+ignore\s+next\s*\*\/\s*/gm;
 // Parsing source code for each transaction/code is slow and therefore we cache it
-const coverageEntriesBySourceHash: { [sourceHash: string]: CoverageEntriesDescription } = {};
+const sourceHashToCoverageEntries: { [sourceHash: string]: CoverageEntriesDescription } = {};
 export const collectCoverageEntries = (contractSource: string) => {
     const sourceHash = ethUtil.sha3(contractSource).toString('hex');
-    if (_.isUndefined(coverageEntriesBySourceHash[sourceHash]) && !_.isUndefined(contractSource)) {
+    if (_.isUndefined(sourceHashToCoverageEntries[sourceHash]) && !_.isUndefined(contractSource)) {
         const ast = parser.parse(contractSource, { range: true });
-        const locationByOffset = getLocationByOffset(contractSource);
+        const offsetToLocation = getOffsetToLocation(contractSource);
         const ignoreRangesBegingingAt = gatherRangesToIgnore(contractSource);
-        const visitor = new ASTVisitor(locationByOffset, ignoreRangesBegingingAt);
+        const visitor = new ASTVisitor(offsetToLocation, ignoreRangesBegingingAt);
         parser.visit(ast, visitor);
-        coverageEntriesBySourceHash[sourceHash] = visitor.getCollectedCoverageEntries();
+        sourceHashToCoverageEntries[sourceHash] = visitor.getCollectedCoverageEntries();
     }
-    const coverageEntriesDescription = coverageEntriesBySourceHash[sourceHash];
+    const coverageEntriesDescription = sourceHashToCoverageEntries[sourceHash];
     return coverageEntriesDescription;
 };
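
Usage sketch (hypothetical source string), tying the cache to the new assembly support in ast_visitor.ts above:

    import { collectCoverageEntries } from '@0x/sol-tracing-utils';

    const source = 'contract C { function f() public { assembly { let x := 1 } } }';
    const entries = collectCoverageEntries(source);
    // With the new Assembly* visitors, entries.statementMap now also covers the
    // `let x := 1` statement; a repeat call returns the sha3-keyed cached result.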

View File

@@ -13,7 +13,7 @@ interface ASTInfo {
 }
 // Parsing source code for each transaction/code is slow and therefore we cache it
-const parsedSourceByHash: { [sourceHash: string]: Parser.ASTNode } = {};
+const hashToParsedSource: { [sourceHash: string]: Parser.ASTNode } = {};
 /**
  * Gets the source range snippet by source range to be used by revert trace.
@@ -22,10 +22,10 @@ const parsedSourceByHash: { [sourceHash: string]: Parser.ASTNode } = {};
  */
 export function getSourceRangeSnippet(sourceRange: SourceRange, sourceCode: string): SourceSnippet | null {
     const sourceHash = ethUtil.sha3(sourceCode).toString('hex');
-    if (_.isUndefined(parsedSourceByHash[sourceHash])) {
-        parsedSourceByHash[sourceHash] = Parser.parse(sourceCode, { loc: true });
+    if (_.isUndefined(hashToParsedSource[sourceHash])) {
+        hashToParsedSource[sourceHash] = Parser.parse(sourceCode, { loc: true });
     }
-    const astNode = parsedSourceByHash[sourceHash];
+    const astNode = hashToParsedSource[sourceHash];
     const visitor = new ASTInfoVisitor();
     Parser.visit(astNode, visitor);
     const astInfo = visitor.getASTInfoForRange(sourceRange);

View File

@@ -22,11 +22,13 @@ export {
     BranchMap,
     EvmCallStackEntry,
     FnMap,
-    LocationByOffset,
+    OffsetToLocation,
     StatementMap,
     TraceInfoBase,
     TraceInfoExistingContract,
     TraceInfoNewContract,
+    Sources,
+    SourceCodes,
 } from './types';
 export { collectCoverageEntries } from './collect_coverage_entries';
 export { TraceCollector, SingleFileSubtraceHandler } from './trace_collector';

View File

@@ -1,7 +1,7 @@
 import * as _ from 'lodash';
 import { getPcToInstructionIndexMapping } from './instructions';
-import { LocationByOffset, SourceRange } from './types';
+import { OffsetToLocation, SourceCodes, SourceRange, Sources } from './types';
 const RADIX = 10;
@@ -15,38 +15,41 @@ export interface SourceLocation {
  * Receives a string with newlines and returns a map of byte offset to LineColumn
  * @param str A string to process
  */
-export function getLocationByOffset(str: string): LocationByOffset {
-    const locationByOffset: LocationByOffset = { 0: { line: 1, column: 0 } };
+export function getOffsetToLocation(str: string): OffsetToLocation {
+    const offsetToLocation: OffsetToLocation = { 0: { line: 1, column: 0 } };
     let currentOffset = 0;
     for (const char of str.split('')) {
-        const location = locationByOffset[currentOffset];
+        const location = offsetToLocation[currentOffset];
         const isNewline = char === '\n';
-        locationByOffset[currentOffset + 1] = {
+        offsetToLocation[currentOffset + 1] = {
             line: location.line + (isNewline ? 1 : 0),
             column: isNewline ? 0 : location.column + 1,
         };
         currentOffset++;
     }
-    return locationByOffset;
+    return offsetToLocation;
 }
 /**
  * Parses a sourcemap string.
  * The solidity sourcemap format is documented here: https://github.com/ethereum/solidity/blob/develop/docs/miscellaneous.rst#source-mappings
- * @param sourceCodes sources contents
+ * @param indexToSourceCode index to source code
  * @param srcMap source map string
  * @param bytecodeHex contract bytecode
- * @param sources sources file names
+ * @param indexToSource index to source file path
  */
 export function parseSourceMap(
-    sourceCodes: string[],
+    sourceCodes: SourceCodes,
     srcMap: string,
     bytecodeHex: string,
-    sources: string[],
+    sources: Sources,
 ): { [programCounter: number]: SourceRange } {
     const bytecode = Uint8Array.from(Buffer.from(bytecodeHex, 'hex'));
     const pcToInstructionIndex: { [programCounter: number]: number } = getPcToInstructionIndexMapping(bytecode);
-    const locationByOffsetByFileIndex = _.map(sourceCodes, s => (_.isUndefined(s) ? {} : getLocationByOffset(s)));
+    const fileIndexToOffsetToLocation: { [fileIndex: number]: OffsetToLocation } = {};
+    _.map(sourceCodes, (sourceCode: string, fileIndex: number) => {
+        fileIndexToOffsetToLocation[fileIndex] = _.isUndefined(sourceCode) ? {} : getOffsetToLocation(sourceCode);
+    });
     const entries = srcMap.split(';');
     let lastParsedEntry: SourceLocation = {} as any;
     const instructionIndexToSourceRange: { [instructionIndex: number]: SourceRange } = {};
@@ -66,14 +69,18 @@ export function parseSourceMap(
             length,
             fileIndex,
         };
-        if (parsedEntry.fileIndex !== -1 && !_.isUndefined(locationByOffsetByFileIndex[parsedEntry.fileIndex])) {
+        if (parsedEntry.fileIndex !== -1 && !_.isUndefined(fileIndexToOffsetToLocation[parsedEntry.fileIndex])) {
+            const offsetToLocation = fileIndexToOffsetToLocation[parsedEntry.fileIndex];
             const sourceRange = {
                 location: {
-                    start: locationByOffsetByFileIndex[parsedEntry.fileIndex][parsedEntry.offset],
-                    end: locationByOffsetByFileIndex[parsedEntry.fileIndex][parsedEntry.offset + parsedEntry.length],
+                    start: offsetToLocation[parsedEntry.offset],
+                    end: offsetToLocation[parsedEntry.offset + parsedEntry.length],
                 },
                 fileName: sources[parsedEntry.fileIndex],
             };
+            if (sourceRange.location.start === undefined || sourceRange.location.end === undefined) {
+                throw new Error(`Error while processing sourcemap: location out of range in ${sourceRange.fileName}`);
+            }
             instructionIndexToSourceRange[i] = sourceRange;
         } else {
             // Some assembly code generated by Solidity can't be mapped back to a line of source code.
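
A worked example of `getOffsetToLocation` on a tiny input (hypothetical, consistent with the loop above):

    const offsetToLocation = getOffsetToLocation('ab\nc');
    // offsetToLocation[0] -> { line: 1, column: 0 } ('a')
    // offsetToLocation[1] -> { line: 1, column: 1 } ('b')
    // offsetToLocation[2] -> { line: 1, column: 2 } ('\n')
    // offsetToLocation[3] -> { line: 2, column: 0 } ('c')
    // offsetToLocation[4] -> { line: 2, column: 1 } (end of input)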

View File

@@ -4,21 +4,21 @@ import * as _ from 'lodash';
 import { utils } from './utils';
-export interface TraceByContractAddress {
+export interface ContractAddressToTraces {
     [contractAddress: string]: StructLog[];
 }
 /**
- * Converts linear stack trace to `TraceByContractAddress`.
+ * Converts linear stack trace to `ContractAddressToTraces`.
  * @param structLogs stack trace
  * @param startAddress initial context address
  */
-export function getTracesByContractAddress(structLogs: StructLog[], startAddress: string): TraceByContractAddress {
-    const traceByContractAddress: TraceByContractAddress = {};
+export function getContractAddressToTraces(structLogs: StructLog[], startAddress: string): ContractAddressToTraces {
+    const contractAddressToTraces: ContractAddressToTraces = {};
     let currentTraceSegment = [];
     const addressStack = [startAddress];
     if (_.isEmpty(structLogs)) {
-        return traceByContractAddress;
+        return contractAddressToTraces;
     }
     const normalizedStructLogs = utils.normalizeStructLogs(structLogs);
     // tslint:disable-next-line:prefer-for-of
@@ -45,14 +45,14 @@ export function getTracesByContractAddress(structLogs: StructLog[], startAddress
                 const nextStructLog = normalizedStructLogs[i + 1];
                 if (nextStructLog.depth !== structLog.depth) {
                     addressStack.push(newAddress);
-                    traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+                    contractAddressToTraces[currentAddress] = (contractAddressToTraces[currentAddress] || []).concat(
                         currentTraceSegment,
                     );
                     currentTraceSegment = [];
                 }
             } else if (utils.isEndOpcode(structLog.op)) {
                 const currentAddress = addressStack.pop() as string;
-                traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+                contractAddressToTraces[currentAddress] = (contractAddressToTraces[currentAddress] || []).concat(
                     currentTraceSegment,
                 );
                 currentTraceSegment = [];
@@ -71,7 +71,7 @@ export function getTracesByContractAddress(structLogs: StructLog[], startAddress
                 logUtils.warn(
                     "Detected a contract created from within another contract. We currently do not support that scenario. We'll just skip that trace",
                 );
-                return traceByContractAddress;
+                return contractAddressToTraces;
             } else {
                 if (structLog !== _.last(normalizedStructLogs)) {
                     const nextStructLog = normalizedStructLogs[i + 1];
@@ -79,7 +79,7 @@ export function getTracesByContractAddress(structLogs: StructLog[], startAddress
                         continue;
                     } else if (nextStructLog.depth === structLog.depth - 1) {
                         const currentAddress = addressStack.pop() as string;
-                        traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+                        contractAddressToTraces[currentAddress] = (contractAddressToTraces[currentAddress] || []).concat(
                             currentTraceSegment,
                         );
                         currentTraceSegment = [];
@@ -94,11 +94,11 @@ export function getTracesByContractAddress(structLogs: StructLog[], startAddress
     }
     if (currentTraceSegment.length !== 0) {
         const currentAddress = addressStack.pop() as string;
-        traceByContractAddress[currentAddress] = (traceByContractAddress[currentAddress] || []).concat(
+        contractAddressToTraces[currentAddress] = (contractAddressToTraces[currentAddress] || []).concat(
             currentTraceSegment,
         );
         currentTraceSegment = [];
         logUtils.warn('Malformed trace. Current trace segment non empty at the end');
     }
-    return traceByContractAddress;
+    return contractAddressToTraces;
 }
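
Return-shape sketch with hypothetical addresses (compare the expectations in the updated trace test at the bottom of this diff): each contract context accumulates the struct logs executed inside it:

    const contractAddressToTraces = getContractAddressToTraces(structLogs, startAddress);
    // {
    //   '0x0000000000000000000000000000000000000001': [/* caller-context logs */],
    //   '0x0000000000000000000000000000000000000002': [/* logs inside the called contract */],
    // }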

View File

@@ -1,5 +1,6 @@
 import { BlockchainLifecycle } from '@0x/dev-utils';
 import { Callback, ErrorCallback, NextCallback, Subprovider } from '@0x/subproviders';
+import { logUtils } from '@0x/utils';
 import { CallDataRPC, marshaller, Web3Wrapper } from '@0x/web3-wrapper';
 import { JSONRPCRequestPayload, Provider, TxData } from 'ethereum-types';
 import * as _ from 'lodash';
@@ -20,6 +21,23 @@ export interface TraceCollectionSubproviderConfig {
     shouldCollectGasEstimateTraces: boolean;
 }
+type AsyncFunc = (...args: any[]) => Promise<void>;
+// HACK: This wrapper outputs errors to console even if the promise gets ignored
+// we need this because web3-provider-engine does not handle promises in
+// the after function of next(after).
+function logAsyncErrors(fn: AsyncFunc): AsyncFunc {
+    async function wrappedAsync(...args: any[]): Promise<void> {
+        try {
+            await fn(...args);
+        } catch (err) {
+            logUtils.log(err);
+            throw err;
+        }
+    }
+    return wrappedAsync;
+}
 // Because there is no notion of a call trace in the Ethereum rpc - we collect them in a rather non-obvious/hacky way.
 // On each call - we create a snapshot, execute the call as a transaction, get the trace, revert the snapshot.
 // That allows us to avoid influencing test behaviour.
@@ -74,7 +92,7 @@ export abstract class TraceCollectionSubprovider extends Subprovider {
                     next();
                 } else {
                     const txData = payload.params[0];
-                    next(this._onTransactionSentAsync.bind(this, txData));
+                    next(logAsyncErrors(this._onTransactionSentAsync.bind(this, txData)));
                 }
                 return;
@@ -83,7 +101,7 @@ export abstract class TraceCollectionSubprovider extends Subprovider {
                    next();
                } else {
                    const callData = payload.params[0];
-                   next(this._onCallOrGasEstimateExecutedAsync.bind(this, callData));
+                   next(logAsyncErrors(this._onCallOrGasEstimateExecutedAsync.bind(this, callData)));
                }
                return;
@@ -92,7 +110,7 @@ export abstract class TraceCollectionSubprovider extends Subprovider {
                    next();
                } else {
                    const estimateGasData = payload.params[0];
-                   next(this._onCallOrGasEstimateExecutedAsync.bind(this, estimateGasData));
+                   next(logAsyncErrors(this._onCallOrGasEstimateExecutedAsync.bind(this, estimateGasData)));
                }
                return;

View File

@@ -56,7 +56,7 @@ export class TraceCollector {
         this._singleFileSubtraceHandler = singleFileSubtraceHandler;
     }
     public async writeOutputAsync(): Promise<void> {
-        const finalCoverage = this._collector.getFinalCoverage();
+        const finalCoverage: Coverage = this._collector.getFinalCoverage();
         const stringifiedCoverage = JSON.stringify(finalCoverage, null, '\t');
         await mkdirpAsync('coverage');
         fs.writeFileSync('coverage/coverage.json', stringifiedCoverage);
@@ -80,14 +80,14 @@ export class TraceCollector {
         const bytecodeHex = stripHexPrefix(bytecode);
         const sourceMap = isContractCreation ? contractData.sourceMap : contractData.sourceMapRuntime;
         const pcToSourceRange = parseSourceMap(contractData.sourceCodes, sourceMap, bytecodeHex, contractData.sources);
-        for (let fileIndex = 0; fileIndex < contractData.sources.length; fileIndex++) {
+        _.map(contractData.sources, (_sourcePath: string, fileIndex: string) => {
             const singleFileCoverageForTrace = this._singleFileSubtraceHandler(
                 contractData,
                 traceInfo.subtrace,
                 pcToSourceRange,
-                fileIndex,
+                _.parseInt(fileIndex),
             );
             this._collector.add(singleFileCoverageForTrace);
-        }
+        });
     }
 }

View File

@@ -1,7 +1,8 @@
+import { NodeType } from '@0x/web3-wrapper';
 import * as _ from 'lodash';
 import { constants } from './constants';
-import { getTracesByContractAddress } from './trace';
+import { getContractAddressToTraces } from './trace';
 import { TraceCollectionSubprovider } from './trace_collection_subprovider';
 import { TraceInfo, TraceInfoExistingContract, TraceInfoNewContract } from './types';
@@ -12,18 +13,50 @@ export abstract class TraceInfoSubprovider extends TraceCollectionSubprovider {
     protected abstract _handleTraceInfoAsync(traceInfo: TraceInfo): Promise<void>;
     protected async _recordTxTraceAsync(address: string, data: string | undefined, txHash: string): Promise<void> {
         await this._web3Wrapper.awaitTransactionMinedAsync(txHash, 0);
-        const trace = await this._web3Wrapper.getTransactionTraceAsync(txHash, {
-            disableMemory: true,
-            disableStack: false,
-            disableStorage: true,
-        });
-        const tracesByContractAddress = getTracesByContractAddress(trace.structLogs, address);
-        const subcallAddresses = _.keys(tracesByContractAddress);
+        const nodeType = await this._web3Wrapper.getNodeTypeAsync();
+        let trace;
+        if (nodeType === NodeType.Geth) {
+            // For very large traces we use a custom tracer that outputs a format compatible with a
+            // regular trace. We only need the 2nd item on the stack when the instruction is a call.
+            // By not including other stack values, we drastically limit the amount of data to be collected.
+            // There are no good docs about how to write those tracers, but you can find some example ones here:
+            // https://github.com/ethereum/go-ethereum/tree/master/eth/tracers/internal/tracers
+            const tracer = `
+                {
+                    data: [],
+                    step: function(log) {
+                        const op = log.op.toString();
+                        const opn = 0 | log.op.toNumber();
+                        const pc = 0 | log.getPC();
+                        const depth = 0 | log.getDepth();
+                        const gasCost = 0 | log.getCost();
+                        const gas = 0 | log.getGas();
+                        const isCall = opn == 0xf1 || opn == 0xf2 || opn == 0xf4 || opn == 0xf5;
+                        const stack = isCall ? ['0x'+log.stack.peek(1).toString(16), null] : null;
+                        this.data.push({ pc, gasCost, depth, op, stack, gas });
+                    },
+                    fault: function() { },
+                    result: function() { return {structLogs: this.data}; }
+                }
+            `;
+            trace = await this._web3Wrapper.getTransactionTraceAsync(txHash, { tracer, timeout: '600s' });
+        } else {
+            /**
+             * Ganache doesn't support custom tracers yet.
+             */
+            trace = await this._web3Wrapper.getTransactionTraceAsync(txHash, {
+                disableMemory: true,
+                disableStack: false,
+                disableStorage: true,
+            });
+        }
+        const contractAddressToTraces = getContractAddressToTraces(trace.structLogs, address);
+        const subcallAddresses = _.keys(contractAddressToTraces);
         if (address === constants.NEW_CONTRACT) {
             for (const subcallAddress of subcallAddresses) {
                 let traceInfo: TraceInfoNewContract | TraceInfoExistingContract;
                 if (subcallAddress === 'NEW_CONTRACT') {
-                    const traceForThatSubcall = tracesByContractAddress[subcallAddress];
+                    const traceForThatSubcall = contractAddressToTraces[subcallAddress];
                     traceInfo = {
                         subtrace: traceForThatSubcall,
                         txHash,
@@ -32,7 +65,7 @@ export abstract class TraceInfoSubprovider extends TraceCollectionSubprovider {
                     };
                 } else {
                     const runtimeBytecode = await this._web3Wrapper.getContractCodeAsync(subcallAddress);
-                    const traceForThatSubcall = tracesByContractAddress[subcallAddress];
+                    const traceForThatSubcall = contractAddressToTraces[subcallAddress];
                     traceInfo = {
                         subtrace: traceForThatSubcall,
                         txHash,
@@ -45,7 +78,7 @@ export abstract class TraceInfoSubprovider extends TraceCollectionSubprovider {
         } else {
             for (const subcallAddress of subcallAddresses) {
                 const runtimeBytecode = await this._web3Wrapper.getContractCodeAsync(subcallAddress);
-                const traceForThatSubcall = tracesByContractAddress[subcallAddress];
+                const traceForThatSubcall = contractAddressToTraces[subcallAddress];
                 const traceInfo: TraceInfoExistingContract = {
                     subtrace: traceForThatSubcall,
                     txHash,
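
For reference, one entry the custom tracer pushes for a CALL might look like this (values hypothetical). `log.stack.peek(1)` is the second item from the top of the stack (the call target for CALL-family opcodes); every non-call instruction gets `stack: null`, which is what keeps the payload small:

    const sampleEntry = {
        pc: 1234,
        op: 'CALL',
        gas: 78000,
        gasCost: 700,
        depth: 1,
        stack: ['0x0000000000000000000000000000000000000002', null],
    };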

View File

@@ -16,7 +16,7 @@ export interface SingleFileSourceRange {
     end: LineColumn;
 }
-export interface LocationByOffset {
+export interface OffsetToLocation {
     [offset: number]: LineColumn;
 }
@@ -76,13 +76,20 @@ export interface Coverage {
     };
 }
+export interface SourceCodes {
+    [sourceId: number]: string;
+}
+export interface Sources {
+    [sourceId: number]: string;
+}
 export interface ContractData {
     bytecode: string;
     sourceMap: string;
     runtimeBytecode: string;
     sourceMapRuntime: string;
-    sourceCodes: string[];
-    sources: string[];
+    sourceCodes: SourceCodes;
+    sources: Sources;
 }
 // Part of the trace executed within the same context

View File

@@ -23,6 +23,12 @@ export const utils = {
             utils.compareLineColumn(childRange.end, parentRange.end) <= 0
         );
     },
+    isRangeEqual(childRange: SingleFileSourceRange, parentRange: SingleFileSourceRange): boolean {
+        return (
+            utils.compareLineColumn(parentRange.start, childRange.start) === 0 &&
+            utils.compareLineColumn(childRange.end, parentRange.end) === 0
+        );
+    },
     bytecodeToBytecodeRegex(bytecode: string): string {
         const bytecodeRegex = bytecode
             // Library linking placeholder: __ConvertLib____________________________
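
A quick behavioral sketch of the new helper (ranges hypothetical):

    const a = { start: { line: 1, column: 0 }, end: { line: 1, column: 10 } };
    const b = { start: { line: 1, column: 0 }, end: { line: 1, column: 10 } };
    utils.isRangeEqual(a, b); // true: both endpoints compare equal
    utils.isRangeInside(a, b); // also true: equality is the boundary case of containment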

View File

@@ -4,7 +4,7 @@ import * as _ from 'lodash';
 import 'mocha';
 import * as path from 'path';
-import { getLocationByOffset, parseSourceMap } from '../src/source_maps';
+import { getOffsetToLocation, parseSourceMap } from '../src/source_maps';
 const expect = chai.expect;
@@ -15,7 +15,7 @@ const simplestContract = fs.readFileSync(simplestContractFileName).toString();
 describe('source maps', () => {
     describe('#getLocationByOffset', () => {
         it('correctly computes location by offset', () => {
-            const locationByOffset = getLocationByOffset(simplestContract);
+            const offsetToLocation = getOffsetToLocation(simplestContract);
             const expectedLocationByOffset = {
                 '0': { line: 1, column: 0 },
                 '1': { line: 1, column: 1 },
@@ -41,7 +41,7 @@ describe('source maps', () => {
                 '21': { line: 2, column: 1 },
                 '22': { line: 3, column: 0 },
             };
-            expect(locationByOffset).to.be.deep.equal(expectedLocationByOffset);
+            expect(offsetToLocation).to.be.deep.equal(expectedLocationByOffset);
         });
     });
     describe('#parseSourceMap', () => {

View File

@@ -3,7 +3,7 @@ import { OpCode, StructLog } from 'ethereum-types';
 import * as _ from 'lodash';
 import 'mocha';
-import { getTracesByContractAddress } from '../src/trace';
+import { getContractAddressToTraces } from '../src/trace';
 const expect = chai.expect;
@@ -44,7 +44,7 @@ describe('Trace', () => {
         ];
         const fullTrace = _.map(trace, compactStructLog => addDefaultStructLogFields(compactStructLog));
         const startAddress = '0x0000000000000000000000000000000000000001';
-        const traceByContractAddress = getTracesByContractAddress(fullTrace, startAddress);
+        const traceByContractAddress = getContractAddressToTraces(fullTrace, startAddress);
         const expectedTraceByContractAddress = {
             [startAddress]: [fullTrace[0], fullTrace[2]],
             [delegateCallAddress]: [fullTrace[1]],