Merge branch 'development' into pull-github-data

This commit is contained in:
Alex Svanevik
2019-03-19 14:31:31 +08:00
committed by GitHub
302 changed files with 7322 additions and 1338 deletions

View File

@@ -0,0 +1,40 @@
import { MigrationInterface, QueryRunner, Table, TableIndex } from 'typeorm';
// Schema for the raw Greenhouse applications table. The composite primary
// key (id, last_activity_at) stores one row per application per activity
// timestamp, preserving a history of each application's state over time.
const applications = new Table({
    name: 'raw.greenhouse_applications',
    columns: [
        { name: 'id', type: 'bigint', isPrimary: true },
        { name: 'last_activity_at', type: 'timestamp', isPrimary: true },
        { name: 'candidate_id', type: 'bigint' },
        { name: 'applied_at', type: 'timestamp' },
        { name: 'status', type: 'varchar' },
        { name: 'rejected_at', type: 'timestamp', isNullable: true },
        { name: 'source_id', type: 'bigint', isNullable: true },
        { name: 'source_name', type: 'varchar', isNullable: true },
        { name: 'credited_to_id', type: 'bigint', isNullable: true },
        { name: 'credited_to_name', type: 'varchar', isNullable: true },
        { name: 'current_stage_id', type: 'bigint', isNullable: true },
        { name: 'current_stage_name', type: 'varchar', isNullable: true },
    ],
});
// Secondary indices supporting lookups by activity window, stage,
// candidate, and sourcing credit.
const applicationIndices = [
    new TableIndex({ columnNames: ['last_activity_at', 'current_stage_id'] }),
    new TableIndex({ columnNames: ['current_stage_id'] }),
    new TableIndex({ columnNames: ['candidate_id'] }),
    new TableIndex({ columnNames: ['credited_to_id'] }),
];
/**
 * Migration: creates the raw.greenhouse_applications table together with its
 * secondary indices, and tears both down in reverse order on rollback.
 */
export class CreateGreenhouseTables1551465374766 implements MigrationInterface {
    public async up(runner: QueryRunner): Promise<any> {
        // The table must exist before its indices can be created.
        await runner.createTable(applications);
        await runner.createIndices('raw.greenhouse_applications', applicationIndices);
    }
    public async down(runner: QueryRunner): Promise<any> {
        // Reverse of `up`: drop the indices before the table they live on.
        await runner.dropIndices('raw.greenhouse_applications', applicationIndices);
        await runner.dropTable(applications);
    }
}

Binary file not shown.

View File

@@ -2,7 +2,9 @@ import { fetchAsync } from '@0x/utils';
import Bottleneck from 'bottleneck';
const ONE_SECOND = 1000;
const EDPS_BASE_URL = 'http://35.185.219.196:1337';
const EDPS_BASE_URL = 'http://23.22.220.126:1337';
// tslint:disable-next-line:custom-no-magic-numbers
const TIMEOUT = ONE_SECOND * 30;
export type EdpsResponse = EdpsWrapper[];
@@ -36,7 +38,7 @@ export class EdpsSource {
*/
public async getEdpsAsync(direction: string, symbol: string, amount: number): Promise<EdpsWrapper> {
const edpsUrl = `${EDPS_BASE_URL}/${direction}?amount=${amount}&symbol=${symbol}&decimals=`;
const resp = await this._limiter.schedule(() => fetchAsync(edpsUrl));
const resp = await this._limiter.schedule(() => fetchAsync(edpsUrl, {}, TIMEOUT));
const respJson: EdpsResponse = await resp.json();
const allExchanges: EdpsWrapper = {};
// The below unwraps the response so we get 1 single EdpsWrapper object

View File

@@ -0,0 +1,58 @@
import { fetchAsync } from '@0x/utils';
import { stringify } from 'querystring';
const HTTP_OK_STATUS = 200;
const GREENHOUSE_URI = 'https://harvest.greenhouse.io/v1';
// Application status values returned by the Greenhouse Harvest API.
enum GreenhouseStatus {
    Active = 'active',
    Rejected = 'rejected',
    Hired = 'hired',
}
// Shape of one application object from the Harvest GET /v1/applications
// endpoint. Field names are snake_case because they come straight from the
// API response.
export interface GreenhouseApplicationResponse {
    // Unique application id.
    id: number;
    // Id of the candidate this application belongs to.
    candidate_id: number;
    // Timestamp string for when the candidate applied.
    applied_at: string;
    // Timestamp string for when the application was rejected, if it was.
    rejected_at?: string;
    // Timestamp string of the most recent activity on the application.
    last_activity_at: string;
    // The Greenhouse user credited with sourcing this application.
    credited_to: {
        id: number;
        name: string;
    };
    // Where the application came from (e.g. a job board or referral).
    source: {
        id: number;
        public_name: string;
    };
    status: GreenhouseStatus;
    // The interview stage the application is currently in.
    current_stage: {
        id: number;
        name: string;
    };
}
/**
 * Throws if the Greenhouse API responded with anything other than HTTP 200.
 *
 * Note: `JSON.stringify` on a fetch `Response` produces `'{}'` (it has no
 * enumerable own properties), so we report the status fields explicitly
 * instead to get a usable error message.
 * @param response The fetch Response to validate.
 * @throws Error containing the HTTP status, status text, and URL.
 */
function httpErrorCheck(response: Response): void {
    if (response.status !== HTTP_OK_STATUS) {
        throw new Error(
            `HTTP error while scraping Greenhouse: status=${response.status} ${response.statusText} url=${response.url}`,
        );
    }
}
/**
 * Client for the Greenhouse Harvest API. Authenticates with HTTP Basic auth,
 * using the access token as the username and an empty password.
 */
export class GreenhouseSource {
    private readonly _authHeaders: any;
    constructor(accessToken: string) {
        // Harvest expects "token:" (token as user, blank password), base64-encoded.
        const encodedCredentials = Buffer.from(`${accessToken}:`).toString('base64');
        this._authHeaders = {
            'Content-Type': 'application/json',
            Authorization: `Basic ${encodedCredentials}`,
        };
    }
    /**
     * Fetches applications whose last activity occurred after `startTime`.
     * NOTE(review): only one page (up to 500 records) is requested — confirm
     * whether Link-header pagination is needed for high-volume windows.
     * @param startTime Lower bound on last_activity_at, sent as ISO-8601.
     */
    public async fetchApplicationsAsync(startTime: Date): Promise<GreenhouseApplicationResponse[]> {
        const queryParams = stringify({
            last_activity_after: startTime.toISOString(),
            per_page: 500, // max
        });
        const response = await fetchAsync(`${GREENHOUSE_URI}/applications?${queryParams}`, {
            headers: this._authHeaders,
        });
        httpErrorCheck(response);
        return response.json();
    }
}

View File

@@ -8,12 +8,18 @@ import { fetchSuccessfullyOrThrowAsync } from '../../utils';
export const NONFUNGIBLE_DOT_COM_URL = 'https://nonfungible.com/api/v1';
// Number of trades to get at once. This is a hard limit enforced by the API.
const MAX_TRADES_PER_QUERY = 100;
// Chunk sizes for trade history splitting for storage on S3. cryptokitties is 800 MB. others are manageable.
export const S3_CHUNK_SIZES: { [publisher: string]: number } = {
cryptokitties: 40000, // 40K trades puts the chunk file size on a par with the axieinfinity file size.
};
// Note(albrow): For now this will have to be manually updated by checking
// https://nonfungible.com/
export const knownPublishers = [
'axieinfinity',
// 'cryptokitties', // disabled until we get updated initial dump that isn't truncated
'chainbreakers',
'chibifighters',
'cryptokitties',
'cryptopunks',
'cryptovoxels',
'decentraland',
@@ -23,6 +29,7 @@ export const knownPublishers = [
'ethtown',
// 'knownorigin', // disabled because of null characters in data being rejected by postgres
// 'mythereum', // worked at one time, but now seems dead
'mlbcryptobaseball',
'superrare',
];
@@ -35,7 +42,7 @@ export interface NonfungibleDotComTradeResponse {
transactionHash: string;
blockNumber: number;
logIndex: number;
blockTimestamp: string;
blockTimestamp: string | number; // string from API, number from initial dump
assetId: string;
assetDescriptor: string;
nftAddress: string;
@@ -59,120 +66,163 @@ export interface NonfungibleDotComTradeResponse {
* Gets and returns all trades for the given publisher, starting at the given block number.
* Automatically handles pagination.
* @param publisher A valid "publisher" for the nonfungible.com API. (e.g. "cryptokitties")
* @param blockNumberStart The block number to start querying from.
* @param blockNumberStart The block number to start querying from. A value of
* 0 indicates that trades should be pulled from the initial dump before
* querying the API.
*/
export async function getTradesAsync(
publisher: string,
blockNumberStart: number,
): Promise<NonfungibleDotComTradeResponse[]> {
const allTrades: NonfungibleDotComTradeResponse[] = [];
/**
* Because we need to de-duplicate trades as they come in, and because some
* projects have a ton of trades (eg cryptokitties), we can't do a simple
* O(n^2) search for each trade in all of the trades we've already received.
* So, we temporarily store trades in a map, for quick lookup while
* de-duplicating. Later, we'll convert the map to an array for the
* caller.
*/
const blockNumberToTrades = new Map<number, NonfungibleDotComTradeResponse[]>();
/**
* due to high data volumes and rate limiting, we procured an initial data
* dump from nonfungible.com. If the requested starting block number is
* contained in that initial dump, then pull relevant trades from there
* first. Later (below) we'll get the more recent trades from the API itself.
* The API returns trades in reverse chronological order, so highest block
* numbers first. This variable dictates when to stop pulling trades from
* the API.
*/
let blockNumberStop = blockNumberStart;
if (blockNumberStart < highestBlockNumbersInIntialDump[publisher]) {
logUtils.log('getting trades from one-time dump');
// caller needs trades that are in the initial data dump, so get them
// from there, then later go to the API for the rest.
const initialDumpResponse: NonfungibleDotComHistoryResponse = await fetchSuccessfullyOrThrowAsync(
getInitialDumpUrl(publisher),
);
/**
* Due to high data volumes and rate limiting, we procured an initial data
* dump from nonfungible.com. If the sentinel value 0 is passed for
* `blockNumberStart`, that indicates we should pull trades from the
* initial dump before going to the API.
*/
if (blockNumberStop === 0) {
logUtils.log('getting trades from initial dump');
const initialDumpResponse: NonfungibleDotComHistoryResponse = await getInitialDumpTradesAsync(publisher);
const initialDumpTrades = initialDumpResponse.data;
logUtils.log(`got ${initialDumpTrades.length} trades from initial dump.`);
for (const initialDumpTrade of initialDumpTrades) {
if (!shouldProcessTrade(initialDumpTrade, allTrades)) {
ensureNonNull(initialDumpTrade);
if (doesTradeAlreadyExist(initialDumpTrade, blockNumberToTrades)) {
continue;
}
ensureNonNull(initialDumpTrade);
if (!blockNumberToTrades.has(initialDumpTrade.blockNumber)) {
blockNumberToTrades.set(initialDumpTrade.blockNumber, []);
}
allTrades.push(initialDumpTrade);
const tradesForBlock = blockNumberToTrades.get(initialDumpTrade.blockNumber);
if (tradesForBlock === undefined) {
throw new Error('tradesForBlock is undefined');
}
tradesForBlock.push(initialDumpTrade);
blockNumberStop = initialDumpTrade.blockNumber;
}
logUtils.log(`got ${allTrades.length} from one-time dump`);
}
const fullUrl = getFullUrlForPublisher(publisher);
/**
* API returns trades in reverse chronological order, so highest block
* The API returns trades in reverse chronological order, so highest block
* numbers first. The `start` query parameter indicates how far back in
* time (in number of trades) the results should start. Here we iterate
* over both start parameter values and block numbers simultaneously.
* Start parameter values count up from zero. Block numbers count down
* until reaching the highest block number in the initial dump.
* until reaching `blockNumberStop`.
*/
const blockNumberStop = Math.max(highestBlockNumbersInIntialDump[publisher] + 1, blockNumberStart);
for (
let startParam = 0, blockNumber = Number.MAX_SAFE_INTEGER;
blockNumber > blockNumberStop;
startParam += MAX_TRADES_PER_QUERY
) {
const response = await _getTradesWithOffsetAsync(fullUrl, publisher, startParam);
const response = await _getTradesWithOffsetAsync(getFullUrlForPublisher(publisher), publisher, startParam);
const tradesFromApi = response.data;
if (tradesFromApi.length === 0) {
break;
}
logUtils.log(
`got ${
tradesFromApi.length
} trades from API. blockNumber=${blockNumber}. blockNumberStop=${blockNumberStop}`,
);
for (const tradeFromApi of tradesFromApi) {
ensureNonNull(tradeFromApi);
// convert date from "2019-03-06T17:36:24.000Z" to unix epoch integer
const msPerSec = 1000;
tradeFromApi.blockTimestamp = Math.floor(new Date(tradeFromApi.blockTimestamp).valueOf() / msPerSec);
if (tradeFromApi.blockNumber <= blockNumberStop) {
blockNumber = blockNumberStop;
break;
}
if (!shouldProcessTrade(tradeFromApi, allTrades)) {
if (doesTradeAlreadyExist(tradeFromApi, blockNumberToTrades)) {
continue;
}
ensureNonNull(tradeFromApi);
allTrades.push(tradeFromApi);
if (!blockNumberToTrades.has(tradeFromApi.blockNumber)) {
blockNumberToTrades.set(tradeFromApi.blockNumber, []);
}
const tradesForBlock = blockNumberToTrades.get(tradeFromApi.blockNumber);
if (tradesForBlock === undefined) {
throw new Error('tradesForBlock is undefined');
}
tradesForBlock.push(tradeFromApi);
blockNumber = tradeFromApi.blockNumber;
}
}
/**
* now that we have all the trades in the map, convert that map to a simple array for the caller.
*/
const allTrades: NonfungibleDotComTradeResponse[] = [];
for (const blockNumber of blockNumberToTrades.keys()) {
const tradesForBlock = blockNumberToTrades.get(blockNumber);
if (tradesForBlock === undefined) {
throw new Error('tradesForBlock is undefined');
}
for (const trade of tradesForBlock) {
allTrades.push(trade);
}
}
return allTrades;
}
function shouldProcessTrade(
function doesTradeAlreadyExist(
trade: NonfungibleDotComTradeResponse,
existingTrades: NonfungibleDotComTradeResponse[],
existingTrades: Map<number, NonfungibleDotComTradeResponse[]>,
): boolean {
// check to see if this trade is already in existingTrades
const existingTradeIndex = existingTrades.findIndex(
// HACK! making assumptions about composition of primary key
e =>
e.transactionHash === trade.transactionHash &&
e.logIndex === trade.logIndex &&
e.blockNumber === trade.blockNumber,
);
if (existingTradeIndex !== -1) {
logUtils.log("we've already captured this trade. deciding whether to use the existing record or this one.");
if (trade.blockNumber > existingTrades[existingTradeIndex].blockNumber) {
logUtils.log('throwing out existing trade');
existingTrades.splice(existingTradeIndex, 1);
} else {
logUtils.log('letting existing trade stand, and skipping processing of this trade');
return false;
}
const tradesForBlock: NonfungibleDotComTradeResponse[] | undefined = existingTrades.get(trade.blockNumber);
if (tradesForBlock === undefined) {
return false;
}
if (
tradesForBlock.find(
// HACK! making assumptions about composition of primary key
e =>
e.transactionHash === trade.transactionHash &&
e.assetId === trade.assetId &&
e.blockNumber === trade.blockNumber &&
e.logIndex === trade.logIndex,
) === undefined
) {
return false;
}
return true;
}
const highestBlockNumbersInIntialDump: { [publisher: string]: number } = {
axieinfinity: 7065913,
cryptokitties: 4658171,
cryptopunks: 7058897,
cryptovoxels: 7060783,
decentraland_estate: 7065181,
decentraland: 6938962,
etherbots: 5204980,
etheremon: 7065370,
ethtown: 7064126,
knownorigin: 7065160,
mythereum: 7065311,
superrare: 7065955,
const numberOfTradesInInitialDump: { [publisher: string]: number } = {
cryptokitties: 1986316,
};
async function _getTradesWithOffsetAsync(
@@ -194,15 +244,11 @@ function getFullUrlForPublisher(publisher: string): string {
return `${NONFUNGIBLE_DOT_COM_URL}/market/${publisher}/history`;
}
function getInitialDumpUrl(publisher: string): string {
return `https://nonfungible-dot-com-one-time-data-dump.s3.amazonaws.com/sales_summary_${publisher}.json`;
}
function ensureNonNull(trade: NonfungibleDotComTradeResponse): void {
// these fields need to be set in order to avoid non-null
// constraint exceptions upon database insertion.
if (trade.logIndex === undefined) {
// for cryptopunks
// for cryptopunks and cryptokitties
trade.logIndex = 0;
}
if (trade.assetDescriptor === undefined) {
@@ -218,3 +264,27 @@ function ensureNonNull(trade: NonfungibleDotComTradeResponse): void {
trade.marketAddress = '';
}
}
/**
 * Retrieves the one-time initial trade dump for `publisher` from S3.
 * Large data sets are stored as multiple chunk files; smaller ones are a
 * single `sales_summary_<publisher>.json` object. Returns `{ data: [] }`
 * when no dump exists for the publisher.
 */
async function getInitialDumpTradesAsync(publisher: string): Promise<NonfungibleDotComHistoryResponse> {
    const s3UrlPrefix = 'https://nonfungible-dot-com-one-time-data-dump.s3.amazonaws.com/sales_summary_';
    // large data sets (eg cryptokitties) need to be chunked for ease of I/O with S3.
    // this function expects data to be chunked per ../../../scripts/partition_nonfungible_dot_com_dump.ts
    if (S3_CHUNK_SIZES.hasOwnProperty(publisher)) {
        let reconsolidated: NonfungibleDotComTradeResponse[] = [];
        // NOTE(review): assumes every publisher in S3_CHUNK_SIZES also has an
        // entry in numberOfTradesInInitialDump; otherwise numberOfChunks is
        // NaN and the loop below never runs — confirm the two maps stay in sync.
        const numberOfChunks = Math.ceil(numberOfTradesInInitialDump[publisher] / S3_CHUNK_SIZES[publisher]);
        logUtils.log(`Retrieving ${numberOfChunks} separate chunks from S3.`);
        for (let i = 0; i < numberOfChunks; i++) {
            logUtils.log(`Retrieving chunk ${i}...`);
            // Each chunk file is expected to be a bare array of trades.
            const chunkData = await fetchSuccessfullyOrThrowAsync(`${s3UrlPrefix}${publisher}${i}.json`);
            reconsolidated = reconsolidated.concat(chunkData);
        }
        return { data: reconsolidated };
    }
    try {
        return await fetchSuccessfullyOrThrowAsync(`${s3UrlPrefix}${publisher}.json`);
    } catch (error) {
        // A missing dump is not fatal: fall back to an empty history so the
        // caller proceeds straight to the live API.
        logUtils.log(`Failed to retrieve initial dump for publisher '${publisher}'. Assuming there isn't one.`);
        return { data: [] };
    }
}

View File

@@ -2,12 +2,17 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { numberToBigIntTransformer } from '../utils';
// Blocks are pulled directly from an Ethereum node (or from something like
// Infura).
@Entity({ name: 'blocks', schema: 'raw' })
export class Block {
    // Block hash; primary key together with `number`.
    @PrimaryColumn() public hash!: string;
    // Block number, persisted as a bigint column via the transformer.
    @PrimaryColumn({ transformer: numberToBigIntTransformer })
    public number!: number;
    // Timestamp when the block was mined (in ms since Unix Epoch)
    @Column({ name: 'timestamp', transformer: numberToBigIntTransformer })
    public timestamp!: number;
}

View File

@@ -3,54 +3,81 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
// dex_trades contains on-chain trades that have occurred on a decentralized
// exchange (including 0x and some competitors).
// Composite primary key: (source_url, tx_hash, trade_index).
@Entity({ name: 'dex_trades', schema: 'raw' })
export class DexTrade {
    // Typically an API URL where this trade was obtained.
    @PrimaryColumn({ name: 'source_url' })
    public sourceUrl!: string;
    // The hash of the transaction this trade was a part of.
    @PrimaryColumn({ name: 'tx_hash' })
    public txHash!: string;
    // Trade index is a unique identifier for the trade. Not necessarily
    // supported by all sources.
    @PrimaryColumn({ name: 'trade_index' })
    public tradeIndex!: string;
    // The timestamp at which this transaction occurred (in ms since Unix Epoch).
    @Column({ name: 'tx_timestamp', type: 'bigint', transformer: numberToBigIntTransformer })
    public txTimestamp!: number;
    // Deprecated. Recently removed from the Bloxy API.
    @Column({ name: 'tx_date' })
    public txDate!: string;
    // The sender of the on-chain transaction.
    @Column({ name: 'tx_sender' })
    public txSender!: string;
    // Deprecated? No longer seems to be part of the Bloxy API.
    @Column({ name: 'smart_contract_id', type: 'bigint', transformer: numberToBigIntTransformer })
    public smartContractId!: number;
    // The address of the smart contract where the trade was executed.
    @Column({ name: 'smart_contract_address' })
    public smartContractAddress!: string;
    // Deprecated? No longer seems to be part of the Bloxy API.
    @Column({ name: 'contract_type' })
    public contractType!: string;
    // The address of the maker.
    @Column({ type: 'varchar' })
    public maker!: string;
    // The address of the taker.
    @Column({ type: 'varchar' })
    public taker!: string;
    // The amount of asset being bought.
    @Column({ name: 'amount_buy', type: 'numeric', transformer: bigNumberTransformer })
    public amountBuy!: BigNumber;
    // The fee paid by the maker.
    @Column({ name: 'maker_fee_amount', type: 'numeric', transformer: bigNumberTransformer })
    public makerFeeAmount!: BigNumber;
    // Deprecated? No longer seems to be part of the Bloxy API.
    @Column({ name: 'buy_currency_id', type: 'bigint', transformer: numberToBigIntTransformer })
    public buyCurrencyId!: number;
    // The symbol of the asset being bought.
    @Column({ name: 'buy_symbol' })
    public buySymbol!: string;
    // The amount being sold.
    @Column({ name: 'amount_sell', type: 'numeric', transformer: bigNumberTransformer })
    public amountSell!: BigNumber;
    // The fee paid by the taker.
    @Column({ name: 'taker_fee_amount', type: 'numeric', transformer: bigNumberTransformer })
    public takerFeeAmount!: BigNumber;
    // Deprecated? No longer seems to be part of the Bloxy API.
    @Column({ name: 'sell_currency_id', type: 'bigint', transformer: numberToBigIntTransformer })
    public sellCurrencyId!: number;
    // The symbol of the asset being sold.
    @Column({ name: 'sell_symbol' })
    public sellSymbol!: string;
    // Annotation for the maker address.
    @Column({ name: 'maker_annotation' })
    public makerAnnotation!: string;
    // Annotation for the taker address.
    @Column({ name: 'taker_annotation' })
    public takerAnnotation!: string;
    // String representation of the DEX protocol (e.g. "IDEX")
    @Column() public protocol!: string;
    // The address of the token being bought.
    @Column({ name: 'buy_address', type: 'varchar', nullable: true })
    public buyAddress!: string | null;
    // The address of the token being sold.
    @Column({ name: 'sell_address', type: 'varchar', nullable: true })
    public sellAddress!: string | null;
}

View File

@@ -3,24 +3,37 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
// These events come directly from an ERC20 token contract and are fired
// whenever someone updates or sets an approval.
// See: https://github.com/0xProject/0x-protocol-specification/blob/master/v2/v2-specification.md#erc20proxy
@Entity({ name: 'erc20_approval_events', schema: 'raw' })
export class ERC20ApprovalEvent {
// The address of the token for which allowance has been set.
@PrimaryColumn({ name: 'token_address' })
public tokenAddress!: string;
// The index of the event log.
@PrimaryColumn({ name: 'log_index' })
public logIndex!: number;
// The block number where the event occurred.
@PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
public blockNumber!: number;
// The hash of the transaction where this event occurred.
@PrimaryColumn({ name: 'transaction_hash' })
public transactionHash!: string;
// The raw data which comes directly from the event.
@Column({ name: 'raw_data' })
public rawData!: string;
@PrimaryColumn({ name: 'transaction_hash' })
public transactionHash!: string;
// Note: the following fields are parsed from the raw_data.
// The address of the owner (i.e., the person setting the allowance).
@Column({ name: 'owner_address' })
public ownerAddress!: string;
// The address of the spender (i.e., our asset proxy).
@Column({ name: 'spender_address' })
public spenderAddress!: string;
// The amount of the allowance.
@Column({ name: 'amount', type: 'numeric', transformer: bigNumberTransformer })
public amount!: BigNumber;
}

View File

@@ -3,49 +3,74 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { AssetType } from '../types';
import { numberToBigIntTransformer } from '../utils';
// These events come directly from the Exchange contract and are fired whenever
// someone cancels an order.
// See https://github.com/0xProject/0x-protocol-specification/blob/master/v2/v2-specification.md#cancelorder
@Entity({ name: 'exchange_cancel_events', schema: 'raw' })
export class ExchangeCancelEvent {
// The address of the smart contract where this event was fired.
@PrimaryColumn({ name: 'contract_address' })
public contractAddress!: string;
// The index of the event log.
@PrimaryColumn({ name: 'log_index' })
public logIndex!: number;
// The block number where the event occurred.
@PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
public blockNumber!: number;
// The hash of the transaction where this event occurred.
@PrimaryColumn({ name: 'transaction_hash' })
public transactionHash!: string;
// The raw data which comes directly from the event.
@Column({ name: 'raw_data' })
public rawData!: string;
@PrimaryColumn({ name: 'transaction_hash' })
public transactionHash!: string;
// Note: the following fields are parsed from the raw_data.
// The address of the maker.
@Column({ name: 'maker_address' })
public makerAddress!: string;
// The address of the taker (may be null).
@Column({ nullable: true, type: String, name: 'taker_address' })
public takerAddress!: string;
// The address of the fee recepient. Can be used to identify the relayer.
@Column({ name: 'fee_recipient_address' })
public feeRecipientAddress!: string;
// The address of the sender (used for extension contracts).
@Column({ name: 'sender_address' })
public senderAddress!: string;
// The hash of the order that was cancelled.
@Column({ name: 'order_hash' })
public orderHash!: string;
// The raw maker asset data.
@Column({ name: 'raw_maker_asset_data' })
public rawMakerAssetData!: string;
// The maker asset type (e.g. 'erc20' or 'erc721').
@Column({ name: 'maker_asset_type' })
public makerAssetType!: AssetType;
// The id of the AssetProxy used for the maker asset.
@Column({ name: 'maker_asset_proxy_id' })
public makerAssetProxyId!: string;
// The address of the maker token.
@Column({ name: 'maker_token_address' })
public makerTokenAddress!: string;
// The id of the maker token (always null for ERC20 tokens).
@Column({ nullable: true, type: String, name: 'maker_token_id' })
public makerTokenId!: string | null;
// The raw taker asset data.
@Column({ name: 'raw_taker_asset_data' })
public rawTakerAssetData!: string;
// The taker asset type (e.g. 'erc20' or 'erc721').
@Column({ name: 'taker_asset_type' })
public takerAssetType!: AssetType;
// The id of the AssetProxy used for the taker asset.
@Column({ name: 'taker_asset_proxy_id' })
public takerAssetProxyId!: string;
// The address of the taker token.
@Column({ name: 'taker_token_address' })
public takerTokenAddress!: string;
// The id of the taker token (always null for ERC20 tokens).
@Column({ nullable: true, type: String, name: 'taker_token_id' })
public takerTokenId!: string | null;
}

View File

@@ -3,24 +3,37 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
// These events come directly from the Exchange contract and are fired whenever
// someone cancels orders using the cancelOrdersUpTo function.
// See https://github.com/0xProject/0x-protocol-specification/blob/master/v2/v2-specification.md#cancelordersupto
@Entity({ name: 'exchange_cancel_up_to_events', schema: 'raw' })
export class ExchangeCancelUpToEvent {
// The address of the smart contract where this event was fired.
@PrimaryColumn({ name: 'contract_address' })
public contractAddress!: string;
// The index of the event log.
@PrimaryColumn({ name: 'log_index' })
public logIndex!: number;
// The block number where the event occurred.
@PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
public blockNumber!: number;
// The hash of the transaction where this event occurred.
@PrimaryColumn({ name: 'transaction_hash' })
public transactionHash!: string;
// The raw data which comes directly from the event.
@Column({ name: 'raw_data' })
public rawData!: string;
@PrimaryColumn({ name: 'transaction_hash' })
public transactionHash!: string;
// Note: the following fields are parsed from the raw_data.
// The address of the maker.
@Column({ name: 'maker_address' })
public makerAddress!: string;
// The address of the sender (used for extension contracts).
@Column({ name: 'sender_address' })
public senderAddress!: string;
// Orders with a salt less than or equal to this value will be cancelled.
@Column({ name: 'order_epoch', type: 'numeric', transformer: bigNumberTransformer })
public orderEpoch!: BigNumber;
}

View File

@@ -4,57 +4,86 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { AssetType } from '../types';
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
// These events come directly from the Exchange contract and are fired whenever
// someone fills an order.
// See https://github.com/0xProject/0x-protocol-specification/blob/master/v2/v2-specification.md#filling-orders
@Entity({ name: 'exchange_fill_events', schema: 'raw' })
export class ExchangeFillEvent {
// The address of the smart contract where this event was fired.
@PrimaryColumn({ name: 'contract_address' })
public contractAddress!: string;
// The index of the event log.
@PrimaryColumn({ name: 'log_index' })
public logIndex!: number;
// The block number where the event occurred.
@PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
public blockNumber!: number;
// The hash of the transaction where this event occurred.
@PrimaryColumn({ name: 'transaction_hash' })
public transactionHash!: string;
// The raw data which comes directly from the event.
@Column({ name: 'raw_data' })
public rawData!: string;
@PrimaryColumn({ name: 'transaction_hash' })
public transactionHash!: string;
// Note: the following fields are parsed from the raw_data.
// The address of the maker.
@Column({ name: 'maker_address' })
public makerAddress!: string;
// The address of the taker (may be null).
@Column({ name: 'taker_address' })
public takerAddress!: string;
// The address of the fee recepient. Can be used to identify the relayer.
@Column({ name: 'fee_recipient_address' })
public feeRecipientAddress!: string;
// The address of the sender (used for extension contracts).
@Column({ name: 'sender_address' })
public senderAddress!: string;
// The amount of the maker asset which was filled.
@Column({ name: 'maker_asset_filled_amount', type: 'numeric', transformer: bigNumberTransformer })
public makerAssetFilledAmount!: BigNumber;
// The amount of the taker asset which was filled.
@Column({ name: 'taker_asset_filled_amount', type: 'numeric', transformer: bigNumberTransformer })
public takerAssetFilledAmount!: BigNumber;
// The fee paid by the maker.
@Column({ name: 'maker_fee_paid', type: 'numeric', transformer: bigNumberTransformer })
public makerFeePaid!: BigNumber;
// The fee paid by the taker.
@Column({ name: 'taker_fee_paid', type: 'numeric', transformer: bigNumberTransformer })
public takerFeePaid!: BigNumber;
// The hash of the order which was filled.
@Column({ name: 'order_hash' })
public orderHash!: string;
// The raw maker asset data.
@Column({ name: 'raw_maker_asset_data' })
public rawMakerAssetData!: string;
// The maker asset type (e.g. 'erc20' or 'erc721').
@Column({ name: 'maker_asset_type' })
public makerAssetType!: AssetType;
// The id of the AssetProxy used for the maker asset.
@Column({ name: 'maker_asset_proxy_id' })
public makerAssetProxyId!: string;
// The address of the maker token.
@Column({ name: 'maker_token_address' })
public makerTokenAddress!: string;
// The id of the maker token (always null for ERC20 tokens).
@Column({ nullable: true, type: String, name: 'maker_token_id' })
public makerTokenId!: string | null;
// The raw taker asset data.
@Column({ name: 'raw_taker_asset_data' })
public rawTakerAssetData!: string;
// The taker asset type (e.g. 'erc20' or 'erc721').
@Column({ name: 'taker_asset_type' })
public takerAssetType!: AssetType;
// The id of the AssetProxy used for the taker asset.
@Column({ name: 'taker_asset_proxy_id' })
public takerAssetProxyId!: string;
// The address of the taker token.
@Column({ name: 'taker_token_address' })
public takerTokenAddress!: string;
// The id of the taker token (always null for ERC20 tokens).
@Column({ nullable: true, type: String, name: 'taker_token_id' })
public takerTokenId!: string | null;
}

View File

@@ -0,0 +1,47 @@
import { Column, Entity, Index, PrimaryColumn } from 'typeorm';
import { numberToBigIntTransformer } from '../utils';
@Entity({ name: 'greenhouse_applications', schema: 'raw' })
export class GreenhouseApplication {
    // Greenhouse application id. Part of the composite primary key with
    // last_activity_at, so each activity snapshot gets its own row.
    @Index()
    @PrimaryColumn({ type: 'bigint', transformer: numberToBigIntTransformer })
    public id!: number;
    // Id of the candidate who submitted this application.
    @Index()
    @Column({ type: 'bigint', transformer: numberToBigIntTransformer })
    public candidate_id!: number;
    // When the candidate applied.
    @Column({ type: 'timestamp' })
    public applied_at!: Date;
    // When the application was rejected, if it was.
    @Column({ type: 'timestamp', nullable: true })
    public rejected_at?: Date;
    // Most recent activity on the application; second half of the
    // composite primary key.
    @Index()
    @PrimaryColumn({ type: 'timestamp' })
    public last_activity_at!: Date;
    // Id of the source the application came from, if known.
    @Column({ type: 'bigint', nullable: true, transformer: numberToBigIntTransformer })
    public source_id?: number;
    // Human-readable name of the source, if known.
    @Column({ type: 'varchar', nullable: true })
    public source_name?: string;
    // Id of the user credited with sourcing the application, if any.
    @Index()
    @Column({ type: 'bigint', nullable: true, transformer: numberToBigIntTransformer })
    public credited_to_id?: number;
    // Name of the user credited with sourcing the application, if any.
    @Column({ type: 'varchar', nullable: true })
    public credited_to_name?: string;
    // Application status (e.g. 'active', 'rejected', 'hired').
    @Column({ type: 'varchar' })
    public status!: string;
    // Id of the interview stage the application is currently in, if any.
    @Index()
    @Column({ type: 'bigint', nullable: true, transformer: numberToBigIntTransformer })
    public current_stage_id?: number;
    // Name of the current interview stage, if any.
    @Column({ type: 'varchar', nullable: true })
    public current_stage_name?: string;
}

View File

@@ -12,6 +12,7 @@ export { GithubFork } from './github_fork';
export { GithubIssue } from './github_issue';
export { GithubRepo } from './github_repo';
export { GithubPullRequest } from './github_pull_request';
export { GreenhouseApplication } from './greenhouse_application';
export { NonfungibleDotComTrade } from './nonfungible_dot_com_trade';
export { OHLCVExternal } from './ohlcv_external';
export { Relayer } from './relayer';

View File

@@ -3,33 +3,47 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
// Contains trades that come from the Nonfungible.com API.
@Entity({ name: 'nonfungible_dot_com_trades', schema: 'raw' })
export class NonfungibleDotComTrade {
    // The hash of the transaction where the trade occurred.
    @PrimaryColumn({ name: 'transaction_hash' })
    public transactionHash!: string;
    // The publisher for the trade (e.g. "cryptopunks", "etherbots")
    @PrimaryColumn({ name: 'publisher' })
    public publisher!: string;
    // The block number where the trade occurred.
    @PrimaryColumn({ name: 'block_number', type: 'bigint', transformer: numberToBigIntTransformer })
    public blockNumber!: number;
    // The index of the event log.
    @PrimaryColumn({ name: 'log_index' })
    public logIndex!: number;
    // A unique identifier for the asset.
    @PrimaryColumn({ name: 'asset_id' })
    public assetId!: string;
    // The timestamp of the block where the trade occurred.
    // NOTE(review): previously documented as "ms since Unix Epoch", but the
    // nonfungible.com scraper converts API timestamps to whole seconds
    // (divides by 1000) — confirm the intended units.
    @Column({ name: 'block_timestamp', type: 'bigint', transformer: numberToBigIntTransformer })
    public blockTimestamp!: number;
    // An arbitrary string describing the asset (may be empty).
    @Column({ name: 'asset_descriptor' })
    public assetDescriptor!: string;
    // The address of the market/smart contract where the trade occurred.
    @Column({ name: 'market_address' })
    public marketAddress!: string;
    // The total price in base units for the asset.
    @Column({ name: 'total_price', type: 'numeric', transformer: bigNumberTransformer })
    public totalPrice!: BigNumber;
    // The estimated USD price for the asset.
    @Column({ name: 'usd_price', type: 'numeric', transformer: bigNumberTransformer })
    public usdPrice!: BigNumber;
    // The address of the buyer.
    @Column({ name: 'buyer_address' })
    public buyerAddress!: string;
    // The address of the seller.
    @Column({ name: 'seller_address' })
    public sellerAddress!: string;
    // Arbitrary, market-specific data corresponding to the trade.
    @Column({ type: 'jsonb' })
    public meta!: object;
}

View File

@@ -1,21 +1,30 @@
import { Column, Entity, PrimaryColumn } from 'typeorm';
// Contains relayers along with some metadata about them.
@Entity({ name: 'relayers', schema: 'raw' })
export class Relayer {
    // A unique identifier for the relayer. Never changes.
    @PrimaryColumn() public uuid!: string;
    // A human-readable name for the relayer.
    @Column() public name!: string;
    // The URL for the relayer's home page.
    @Column({ name: 'homepage_url', type: 'varchar' })
    public homepageUrl!: string;
    // HTTP SRA endpoint for the relayer (null for relayers that don't support it).
    @Column({ name: 'sra_http_endpoint', type: 'varchar', nullable: true })
    public sraHttpEndpoint!: string | null;
    // WebSocket SRA endpoint for the relayer (null for relayers that don't support it).
    @Column({ name: 'sra_ws_endpoint', type: 'varchar', nullable: true })
    public sraWsEndpoint!: string | null;
    // Application URL (null for relayers without a separate app url).
    @Column({ name: 'app_url', type: 'varchar', nullable: true })
    public appUrl!: string | null;
    // An array of known fee recipient addresses used by this relayer.
    @Column({ name: 'fee_recipient_addresses', type: 'varchar', array: true })
    public feeRecipientAddresses!: string[];
    // An array of known taker addresses used by this relayer.
    @Column({ name: 'taker_addresses', type: 'varchar', array: true })
    public takerAddresses!: string[];
}

View File

@@ -4,60 +4,87 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { AssetType } from '../types';
import { bigNumberTransformer } from '../utils';
// Contains orders obtained from an SRA endpoint.
@Entity({ name: 'sra_orders', schema: 'raw' })
export class SraOrder {
    // The address of the exchange contract for this order (e.g. might be the
    // address of the V1 exchange or the V2 one).
    @PrimaryColumn({ name: 'exchange_address' })
    public exchangeAddress!: string;
    // The hash of the order.
    @PrimaryColumn({ name: 'order_hash_hex' })
    public orderHashHex!: string;
    // The URL of an SRA endpoint where this order was found.
    @PrimaryColumn({ name: 'source_url' })
    public sourceUrl!: string;
    // The address of the maker.
    @Column({ name: 'maker_address' })
    public makerAddress!: string;
    // The address of the taker (may be null).
    @Column({ name: 'taker_address' })
    public takerAddress!: string;
    // The address of the fee recipient. Can be used to identify the relayer.
    @Column({ name: 'fee_recipient_address' })
    public feeRecipientAddress!: string;
    // The address of the sender (used for extension contracts).
    @Column({ name: 'sender_address' })
    public senderAddress!: string;
    // The amount of maker_asset provided by the maker.
    @Column({ name: 'maker_asset_amount', type: 'numeric', transformer: bigNumberTransformer })
    public makerAssetAmount!: BigNumber;
    // The amount of taker_asset provided by the taker.
    @Column({ name: 'taker_asset_amount', type: 'numeric', transformer: bigNumberTransformer })
    public takerAssetAmount!: BigNumber;
    // The fee paid by the maker.
    @Column({ name: 'maker_fee', type: 'numeric', transformer: bigNumberTransformer })
    public makerFee!: BigNumber;
    // The fee paid by the taker.
    @Column({ name: 'taker_fee', type: 'numeric', transformer: bigNumberTransformer })
    public takerFee!: BigNumber;
    // Timestamp in seconds when the order should be considered expired.
    @Column({ name: 'expiration_time_seconds', type: 'numeric', transformer: bigNumberTransformer })
    public expirationTimeSeconds!: BigNumber;
    // A monotonically increasing unique number (typically a timestamp).
    @Column({ name: 'salt', type: 'numeric', transformer: bigNumberTransformer })
    public salt!: BigNumber;
    // The signature for this order (used for verification).
    @Column({ name: 'signature' })
    public signature!: string;
    // The raw maker asset data.
    @Column({ name: 'raw_maker_asset_data' })
    public rawMakerAssetData!: string;
    // The maker asset type (e.g. 'erc20' or 'erc721').
    @Column({ name: 'maker_asset_type' })
    public makerAssetType!: AssetType;
    // The id of the AssetProxy used for the maker asset.
    @Column({ name: 'maker_asset_proxy_id' })
    public makerAssetProxyId!: string;
    // The address of the maker token.
    @Column({ name: 'maker_token_address' })
    public makerTokenAddress!: string;
    // The id of the maker token (always null for ERC20 tokens).
    @Column({ nullable: true, type: String, name: 'maker_token_id' })
    public makerTokenId!: string | null;
    // The raw taker asset data.
    @Column({ name: 'raw_taker_asset_data' })
    public rawTakerAssetData!: string;
    // The taker asset type (e.g. 'erc20' or 'erc721').
    @Column({ name: 'taker_asset_type' })
    public takerAssetType!: AssetType;
    // The id of the AssetProxy used for the taker asset.
    @Column({ name: 'taker_asset_proxy_id' })
    public takerAssetProxyId!: string;
    // The address of the taker token.
    @Column({ name: 'taker_token_address' })
    public takerTokenAddress!: string;
    // The id of the taker token (always null for ERC20 tokens).
    @Column({ nullable: true, type: String, name: 'taker_token_id' })
    public takerTokenId!: string | null;
    // TODO(albrow): Make this optional?
    // Arbitrary metadata associated with the order.
    @Column({ name: 'metadata_json' })
    public metadataJson!: string;
}

View File

@@ -4,15 +4,24 @@ import { numberToBigIntTransformer } from '../utils';
import { SraOrder } from './sra_order';
// Contains observed timestamps for SRA orders in the sra_orders table. This can
// be used to determine when an order first appeared on the order book and how
// long it was there.
@Entity({ name: 'sra_orders_observed_timestamps', schema: 'raw' })
export class SraOrdersObservedTimeStamp {
    // The address of the exchange contract for this order (e.g. might be the
    // address of the V1 exchange or the V2 one).
    @PrimaryColumn({ name: 'exchange_address' })
    public exchangeAddress!: string;
    // The hash of the order.
    @PrimaryColumn({ name: 'order_hash_hex' })
    public orderHashHex!: string;
    // The URL of an SRA endpoint where this order was found.
    @PrimaryColumn({ name: 'source_url' })
    public sourceUrl!: string;
    // The time that the order was observed in the order book. Each order may
    // have been observed multiple times.
    @PrimaryColumn({ name: 'observed_timestamp', transformer: numberToBigIntTransformer })
    public observedTimestamp!: number;
}

View File

@@ -3,20 +3,28 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { bigNumberTransformer } from '../utils/transformers';
// Contains metadata about ERC20 tokens.
// See: https://theethereum.wiki/w/index.php/ERC20_Token_Standard
@Entity({ name: 'token_metadata', schema: 'raw' })
export class TokenMetadata {
    // The address of the token contract.
    @PrimaryColumn({ type: 'varchar', nullable: false })
    public address!: string;
    // The "authority" or where this metadata was obtained. Either "0x" for the
    // 0x token registry or "metamask" for the MetaMask "Contract Map" list.
    @PrimaryColumn({ type: 'varchar', nullable: false })
    public authority!: string;
    // The number of decimals which determines the "base unit" for the token.
    @Column({ type: 'numeric', transformer: bigNumberTransformer, nullable: true })
    public decimals!: BigNumber | null;
    // A human-readable symbol for the token (e.g. "ZRX")
    @Column({ type: 'varchar', nullable: true })
    public symbol!: string | null;
    // A human-readable name for the token (e.g. "0x Protocol")
    @Column({ type: 'varchar', nullable: true })
    public name!: string | null;
}

View File

@@ -3,28 +3,49 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
// Contains orders from an order book snapshot. A "snapshot" is a view of some
// order book at a specific point in time. For most sources, the snapshots use
// "Level 2 Aggregation", which means that orders at the same price are grouped
// together. However, RadarRelay orders use something in between "Level 2
// Aggregation" and "Level 3 Aggregation" where orders are grouped by price and
// maker address. See
// https://datafireball.com/2017/11/29/gdax-orderbook-data-api-level123/ for
// more information about aggregation levels.
@Entity({ name: 'token_orderbook_snapshots', schema: 'raw' })
export class TokenOrderbookSnapshot {
    // The timestamp (in ms since Unix Epoch) at which the snapshot that
    // contains this order was taken.
    @PrimaryColumn({ name: 'observed_timestamp', type: 'bigint', transformer: numberToBigIntTransformer })
    public observedTimestamp!: number;
    // The source where the orders came from (e.g. "idex", "paradex").
    @PrimaryColumn({ name: 'source' })
    public source!: string;
    // The type of the aggregated orders (either "bid" or "ask").
    @PrimaryColumn({ name: 'order_type' })
    public orderType!: string;
    // The price of the aggregated orders.
    @PrimaryColumn({ name: 'price', type: 'numeric', transformer: bigNumberTransformer })
    public price!: BigNumber;
    // The base asset for the aggregated orders.
    @PrimaryColumn({ name: 'base_asset_symbol' })
    public baseAssetSymbol!: string;
    // The quote asset for the aggregated orders.
    @PrimaryColumn({ name: 'quote_asset_symbol' })
    public quoteAssetSymbol!: string;
    // The maker address for the aggregated orders. "unknown" for all sources
    // except RadarRelay.
    @PrimaryColumn({ type: String, name: 'maker_address', default: 'unknown' })
    public makerAddress!: string;
    // The address of the base asset for the aggregated orders.
    @Column({ nullable: true, type: String, name: 'base_asset_address' })
    public baseAssetAddress!: string | null;
    // The total base volume across all aggregated orders.
    @Column({ name: 'base_volume', type: 'numeric', transformer: bigNumberTransformer })
    public baseVolume!: BigNumber;
    // The address of the quote asset for the aggregated orders.
    @Column({ nullable: true, type: String, name: 'quote_asset_address' })
    public quoteAssetAddress!: string | null;
    // The total quote volume across all aggregated orders.
    @Column({ name: 'quote_volume', type: 'numeric', transformer: bigNumberTransformer })
    public quoteVolume!: BigNumber;
}

View File

@@ -3,17 +3,24 @@ import { Column, Entity, PrimaryColumn } from 'typeorm';
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
// Transactions are pulled directly from an Ethereum node (or from something
// like Infura).
@Entity({ name: 'transactions', schema: 'raw' })
export class Transaction {
    // The hash of the transaction.
    @PrimaryColumn({ name: 'transaction_hash' })
    public transactionHash!: string;
    // The hash of the block this transaction is a part of.
    @PrimaryColumn({ name: 'block_hash' })
    public blockHash!: string;
    // The number of the block this transaction is a part of.
    @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
    public blockNumber!: number;
    // The amount of gas used for the transaction.
    @Column({ type: 'numeric', name: 'gas_used', transformer: bigNumberTransformer })
    public gasUsed!: BigNumber;
    // The gas price paid for the transaction.
    @Column({ type: 'numeric', name: 'gas_price', transformer: bigNumberTransformer })
    public gasPrice!: BigNumber;
}

View File

@@ -17,6 +17,7 @@ import {
GithubIssue,
GithubPullRequest,
GithubRepo,
GreenhouseApplication,
NonfungibleDotComTrade,
OHLCVExternal,
Relayer,
@@ -30,12 +31,13 @@ import {
const entities = [
Block,
CopperOpportunity,
CopperActivity,
CopperActivityType,
CopperCustomField,
CopperLead,
CopperOpportunity,
DexTrade,
ERC20ApprovalEvent,
EtherscanTransaction,
ExchangeCancelEvent,
ExchangeCancelUpToEvent,
@@ -45,6 +47,7 @@ const entities = [
GithubIssue,
GithubPullRequest,
GithubRepo,
GreenhouseApplication,
NonfungibleDotComTrade,
OHLCVExternal,
Relayer,

View File

@@ -0,0 +1,23 @@
import { GreenhouseApplicationResponse } from '../../data_sources/greenhouse';
import { GreenhouseApplication } from '../../entities';
/**
 * One-to-one transformation of a Greenhouse API application object into the
 * corresponding GreenhouseApplication entity. Nested objects (source,
 * credited_to, current_stage) may be absent, in which case the derived
 * fields are left undefined.
 * @param response an application object from the Greenhouse Harvest API
 */
export function parseApplications(response: GreenhouseApplicationResponse): GreenhouseApplication {
    const { source, credited_to, current_stage } = response;
    const entity: GreenhouseApplication = {
        id: response.id,
        candidate_id: response.candidate_id,
        applied_at: new Date(response.applied_at),
        rejected_at: response.rejected_at ? new Date(response.rejected_at) : undefined,
        last_activity_at: new Date(response.last_activity_at),
        source_id: source ? source.id : undefined,
        source_name: source ? source.public_name : undefined,
        credited_to_id: credited_to ? credited_to.id : undefined,
        credited_to_name: credited_to ? credited_to.name : undefined,
        status: response.status.toString(),
        current_stage_id: current_stage ? current_stage.id : undefined,
        current_stage_name: current_stage ? current_stage.name : undefined,
    };
    return entity;
}

View File

@@ -0,0 +1,48 @@
/**
* Needed because we store the initial dump of trades in S3, and some projects
* (namely cryptokitties) have dumps that are too big to be transferred easily
* as one big file to and from S3. This script breaks apart a dump file into a
* set of files containing segments of the data. The number of segments is
* based on S3_CHUNK_SIZES specified for each project, or "publisher" in their
* parlance, in ../../data_sources/nonfungible_dot_com/index.ts.
*
* Usage: $ node partition_nonfungible_dot_com_dump.ts publisher
* Example: $ node partition_nonfungible_dot_com_dump.ts cryptokitties
*
* Expects a to find on disk a data file named
* `sales_summary_${publisher}.json`, as emailed by Daniel of nonfungible.com.
*
* Writes to disk a set of files named `sales_summary_${publisher}${N}.json`.
*
* Probably need to use `node` with --max-old-space-size=1024 or maybe
* even more.
*/
import { readFileSync, writeFileSync } from 'fs';
import { splitEvery } from 'ramda';
import { logUtils } from '@0x/utils';
import {
NonfungibleDotComHistoryResponse,
NonfungibleDotComTradeResponse,
S3_CHUNK_SIZES,
} from '../data_sources/nonfungible_dot_com';
// Entry point: read `sales_summary_<publisher>.json`, split its trades into
// fixed-size chunks, and write each chunk to `sales_summary_<publisher><N>.json`.
(() => {
    const publisher = process.argv[2];
    // Fail fast with a clear message instead of crashing later with a
    // confusing error from file I/O or from splitEvery(undefined, ...).
    if (publisher === undefined) {
        throw new Error('Usage: node partition_nonfungible_dot_com_dump.ts <publisher>');
    }
    const chunkSize = S3_CHUNK_SIZES[publisher];
    if (chunkSize === undefined) {
        throw new Error(`No S3_CHUNK_SIZES entry for publisher '${publisher}'`);
    }
    const inputFilename = `sales_summary_${publisher}.json`;
    logUtils.log(`Reading input file ${inputFilename}`);
    const sourceJson: NonfungibleDotComHistoryResponse = JSON.parse(readFileSync(inputFilename).toString());
    logUtils.log(`Splitting data into chunks of ${chunkSize} trades each`);
    const chunks: NonfungibleDotComTradeResponse[][] = splitEvery(chunkSize, sourceJson.data);
    logUtils.log(`Writing ${chunks.length} chunks to disk`);
    for (let chunkIndex = 0; chunkIndex < chunks.length; chunkIndex++) {
        writeFileSync(`sales_summary_${publisher}${chunkIndex}.json`, JSON.stringify(chunks[chunkIndex]));
    }
})();

View File

@@ -0,0 +1,66 @@
import * as R from 'ramda';
import { Connection, ConnectionOptions, createConnection } from 'typeorm';
import { logUtils } from '@0x/utils';
import { GreenhouseSource } from '../data_sources/greenhouse';
import { GreenhouseApplication } from '../entities';
import * as ormConfig from '../ormconfig';
import { parseApplications } from '../parsers/greenhouse';
import { handleError } from '../utils';
// Shared database connection, initialized once by the entry point below and
// used by fetchAndSaveApplicationsAsync.
let connection: Connection;
// Date to start pulling from when the table is empty and no explicit
// GREENHOUSE_START_DATE override is provided.
const GREENHOUSE_FALLBACK_DATE = '2018-09-01';
// Entry point: connect to the database, build a Greenhouse API client from
// the GREENHOUSE_ACCESS_TOKEN env var, and pull application records.
(async () => {
    connection = await createConnection(ormConfig as ConnectionOptions);
    const accessToken = process.env.GREENHOUSE_ACCESS_TOKEN;
    if (accessToken === undefined) {
        throw new Error('Missing required env var: GREENHOUSE_ACCESS_TOKEN');
    }
    const source = new GreenhouseSource(accessToken);
    await fetchAndSaveApplicationsAsync(source);
})().catch(handleError);
/**
 * Determines the timestamp to start pulling records from. Precedence: the
 * GREENHOUSE_START_DATE env var, then the most recent `sortColumn` value
 * already stored in `tableName`, then the hard-coded fallback date.
 * NOTE(review): sortColumn/tableName are interpolated directly into SQL, so
 * they must only ever be trusted, hard-coded identifiers — never user input.
 */
async function getStartDateAsync(conn: Connection, sortColumn: string, tableName: string): Promise<Date> {
    if (process.env.GREENHOUSE_START_DATE) {
        return new Date(process.env.GREENHOUSE_START_DATE);
    }
    const queryResult = await conn.query(`SELECT MAX(${sortColumn}) as _max from ${tableName};`);
    // `SELECT MAX(...)` on an empty table yields one row whose _max is NULL
    // rather than an empty result set, so both cases must use the fallback;
    // otherwise `new Date(null)` would silently produce the 1970 Unix epoch.
    if (R.isEmpty(queryResult) || queryResult[0]._max == null) {
        return new Date(GREENHOUSE_FALLBACK_DATE);
    }
    return new Date(queryResult[0]._max);
}
/**
 * Upper bound for the pull window: the GREENHOUSE_END_DATE env var when set,
 * otherwise the current time.
 */
function getEndDate(): Date {
    const override = process.env.GREENHOUSE_END_DATE;
    return override ? new Date(override) : new Date();
}
/**
 * Pulls applications from Greenhouse whose last activity falls strictly
 * inside the (start, end) window and persists them to the database.
 * @param source the Greenhouse API client to fetch from
 */
async function fetchAndSaveApplicationsAsync(source: GreenhouseSource): Promise<void> {
    const repository = connection.getRepository(GreenhouseApplication);
    const startTime = await getStartDateAsync(connection, 'last_activity_at', 'raw.greenhouse_applications');
    const endTime = getEndDate();
    logUtils.log(`Fetching Greenhouse applications starting from ${startTime}...`);
    const fetched = await source.fetchApplicationsAsync(startTime);
    // Keep only records updated strictly inside the window (exclusive bounds).
    const applications = fetched.filter(application => {
        const lastActivity = new Date(application.last_activity_at);
        return lastActivity > startTime && lastActivity < endTime;
    });
    logUtils.log(
        `Found ${
            applications.length
        } updated Greenhouse applications between ${startTime.toISOString()} and ${endTime.toISOString()}...`,
    );
    const parsed = applications.map(application => parseApplications(application));
    await repository.save(parsed);
    logUtils.log(`Saved ${parsed.length} Greenhouse applications`);
}

View File

@@ -13,7 +13,7 @@ import { handleError } from '../utils';
const BATCH_SAVE_SIZE = 1000;
// Max requests to make to API per second;
const EDPS_MAX_REQUESTS_PER_SECOND = 1;
const EDPS_MAX_REQUESTS_PER_SECOND = 0.5;
// Maximum requests per second to CryptoCompare
const CRYPTO_COMPARE_MAX_REQS_PER_SECOND = 60;
@@ -31,6 +31,7 @@ let connection: Connection;
connection = await createConnection(ormConfig as ConnectionOptions);
const edpsSource = new EdpsSource(EDPS_MAX_REQUESTS_PER_SECOND);
const cryptoCompareSource = new CryptoCompareOHLCVSource(CRYPTO_COMPARE_MAX_REQS_PER_SECOND);
let hasFailed: boolean = false;
logUtils.log('Fetching slippage records');
const nestedSlippages: Slippage[][][] = await Promise.all(
@@ -49,6 +50,7 @@ let connection: Connection;
} catch (e) {
logUtils.log(`Error getting data for symbol=${symbol}, amount=${amount}`);
logUtils.log(e);
hasFailed = true;
return [new Slippage()];
}
}),
@@ -63,5 +65,5 @@ let connection: Connection;
logUtils.log(`Saving ${slippages.length} records to database`);
await SlippageRepository.save(slippages, { chunk: Math.ceil(slippages.length / BATCH_SAVE_SIZE) });
logUtils.log('Done');
process.exit(0);
process.exit(hasFailed ? 1 : 0);
})().catch(handleError);

View File

@@ -0,0 +1,23 @@
import 'mocha';
import 'reflect-metadata';
import { GreenhouseApplication } from '../../src/entities';
import { createDbConnectionOnceAsync } from '../db_setup';
import { ParsedApplications } from '../fixtures/greenhouse/api_v1_applications';
import { chaiSetup } from '../utils/chai_setup';
import { testSaveAndFindEntityAsync } from './util';
chaiSetup.configure();
describe('Greenhouse entities', () => {
    // NOTE: describe callbacks must be synchronous — mocha ignores any
    // returned promise, so the previous `async` here was a no-op.
    describe('save and find', () => {
        it('Greenhouse application', async () => {
            const connection = await createDbConnectionOnceAsync();
            const repository = connection.getRepository(GreenhouseApplication);
            // Await each fixture sequentially. The previous
            // `ParsedApplications.forEach(async ...)` never awaited its
            // callbacks, so the test could pass before any assertion ran.
            for (const entity of ParsedApplications) {
                await testSaveAndFindEntityAsync(repository, entity);
            }
        });
    });
});

View File

@@ -0,0 +1,815 @@
[
{
"status": "rejected",
"source": {
"public_name": "AngelList",
"id": 4000058002
},
"rejection_reason": {
"type": {
"name": "We rejected them",
"id": 4000000002
},
"name": "Lacking skill(s)/qualification(s)",
"id": 4000002002
},
"rejection_details": {},
"rejected_at": "2018-05-17T07:00:00.000Z",
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T00:32:00.138Z",
"jobs": [
{
"name": "Data Scientist",
"id": 4018564002
}
],
"id": 7510796002,
"current_stage": {
"name": "Phone Interview",
"id": 4138361002
},
"credited_to": {
"name": "Zack Skelly",
"last_name": "Skelly",
"id": 4069870002,
"first_name": "Zack",
"employee_id": null
},
"candidate_id": 6977554002,
"applied_at": "2018-05-03T17:51:36.660Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": {
"address": "San Francisco, California, United States"
},
"last_activity_at": "2019-03-08T15:10:02.636Z",
"jobs": [
{
"name": "Software Engineer — Blockchain",
"id": 4018550002
}
],
"id": 14858220002,
"current_stage": {
"name": "Outreach",
"id": 4138271002
},
"credited_to": null,
"candidate_id": 13419271002,
"applied_at": "2018-12-11T17:17:06.473Z",
"answers": []
},
{
"status": "rejected",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": {
"type": {
"name": "We rejected them",
"id": 4000000002
},
"name": "Lacking skill(s)/qualification(s)",
"id": 4000002002
},
"rejection_details": {},
"rejected_at": "2019-01-09T22:23:35.136Z",
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": {
"address": "Atlanta, Georgia, United States"
},
"last_activity_at": "2019-03-08T15:10:11.691Z",
"jobs": [
{
"name": "Software Engineer — Back-end",
"id": 4053187002
}
],
"id": 14984854002,
"current_stage": {
"name": "Application Review",
"id": 4393404002
},
"credited_to": null,
"candidate_id": 16301491002,
"applied_at": "2018-12-15T02:19:02.807Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "ZenSourcer",
"id": 4000173002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T01:58:37.267Z",
"jobs": [
{
"name": "Corporate Counsel",
"id": 4164900002
}
],
"id": 15719204002,
"current_stage": {
"name": "Debrief",
"id": 5221852002
},
"credited_to": {
"name": "Zack Skelly",
"last_name": "Skelly",
"id": 4069870002,
"first_name": "Zack",
"employee_id": null
},
"candidate_id": 14231312002,
"applied_at": "2019-01-10T00:25:20.613Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": {
"address": "London, Greater London, England, United Kingdom"
},
"last_activity_at": "2019-03-08T15:10:10.534Z",
"jobs": [
{
"name": "Product Manager — Growth",
"id": 4114086002
}
],
"id": 16178464002,
"current_stage": {
"name": "Phone Interview",
"id": 4841984002
},
"credited_to": null,
"candidate_id": 14662768002,
"applied_at": "2019-01-21T19:13:05.665Z",
"answers": []
},
{
"status": "rejected",
"source": {
"public_name": "ZenSourcer",
"id": 4000173002
},
"rejection_reason": {
"type": {
"name": "We rejected them",
"id": 4000000002
},
"name": "Lacking skill(s)/qualification(s)",
"id": 4000002002
},
"rejection_details": {},
"rejected_at": "2019-03-07T18:41:29.967Z",
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T15:10:09.636Z",
"jobs": [
{
"name": "Software Engineer — Blockchain",
"id": 4018550002
}
],
"id": 16987221002,
"current_stage": {
"name": "Phone Interview",
"id": 4138252002
},
"credited_to": {
"name": "Zack Skelly",
"last_name": "Skelly",
"id": 4069870002,
"first_name": "Zack",
"employee_id": null
},
"candidate_id": 15425402002,
"applied_at": "2019-02-05T05:50:46.504Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "ZenSourcer",
"id": 4000173002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T01:07:48.511Z",
"jobs": [
{
"name": "Software Engineer — Blockchain",
"id": 4018550002
}
],
"id": 17110646002,
"current_stage": {
"name": "Take Home Test",
"id": 4138280002
},
"credited_to": {
"name": "Marc Savino",
"last_name": "Savino",
"id": 4243712002,
"first_name": "Marc",
"employee_id": null
},
"candidate_id": 15546609002,
"applied_at": "2019-02-07T02:15:21.653Z",
"answers": []
},
{
"status": "rejected",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": {
"type": {
"name": "We rejected them",
"id": 4000000002
},
"name": "Lacking skill(s)/qualification(s)",
"id": 4000002002
},
"rejection_details": {},
"rejected_at": "2019-03-01T01:18:27.114Z",
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": {
"address": "San Francisco, California, United States"
},
"last_activity_at": "2019-03-08T11:15:26.517Z",
"jobs": [
{
"name": "Product Manager — Growth",
"id": 4114086002
}
],
"id": 17248048002,
"current_stage": {
"name": "Phone Interview",
"id": 4841984002
},
"credited_to": null,
"candidate_id": 15678079002,
"applied_at": "2019-02-10T22:18:05.091Z",
"answers": [
{
"question": "How did you hear about this job?",
"answer": "Angellist"
},
{
"question": "Website",
"answer": null
},
{
"question": "GitHub",
"answer": null
},
{
"question": "LinkedIn Profile",
"answer": null
}
]
},
{
"status": "active",
"source": {
"public_name": "Recurse",
"id": 4008626002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T01:40:50.107Z",
"jobs": [
{
"name": "Software Engineer — Blockchain",
"id": 4018550002
}
],
"id": 17289149002,
"current_stage": {
"name": "Phone Interview",
"id": 4138252002
},
"credited_to": {
"name": "Zack Skelly",
"last_name": "Skelly",
"id": 4069870002,
"first_name": "Zack",
"employee_id": null
},
"candidate_id": 15717915002,
"applied_at": "2019-02-11T21:25:51.243Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "ZenSourcer",
"id": 4000173002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T15:10:19.937Z",
"jobs": [
{
"name": "Quantitative Software Engineer",
"id": 4166565002
}
],
"id": 17311290002,
"current_stage": {
"name": "Preliminary Phone Screen",
"id": 5233631002
},
"credited_to": {
"name": "Zack Skelly",
"last_name": "Skelly",
"id": 4069870002,
"first_name": "Zack",
"employee_id": null
},
"candidate_id": 15739177002,
"applied_at": "2019-02-12T04:19:17.173Z",
"answers": []
},
{
"status": "rejected",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": {
"type": {
"name": "We rejected them",
"id": 4000000002
},
"name": "Lacking skill(s)/qualification(s)",
"id": 4000002002
},
"rejection_details": {},
"rejected_at": "2019-02-27T01:27:49.570Z",
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": {
"address": "Los Angeles, California, United States"
},
"last_activity_at": "2019-03-08T15:10:12.111Z",
"jobs": [
{
"name": "Software Engineer — Back-end",
"id": 4053187002
}
],
"id": 17566424002,
"current_stage": {
"name": "Application Review",
"id": 4393404002
},
"credited_to": null,
"candidate_id": 15973199002,
"applied_at": "2019-02-15T01:43:06.450Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T15:10:12.111Z",
"jobs": [
{
"name": "Quantitative Software Engineer",
"id": 4166565002
}
],
"id": 17566430002,
"current_stage": {
"name": "Outreach",
"id": 5233629002
},
"credited_to": null,
"candidate_id": 15973199002,
"applied_at": "2019-02-15T01:43:07.628Z",
"answers": []
},
{
"status": "rejected",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": {
"type": {
"name": "We rejected them",
"id": 4000000002
},
"name": "Lacking skill(s)/qualification(s)",
"id": 4000002002
},
"rejection_details": {},
"rejected_at": "2019-02-21T01:31:35.276Z",
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T15:10:12.111Z",
"jobs": [
{
"name": "Software Engineer — Blockchain",
"id": 4018550002
}
],
"id": 17566459002,
"current_stage": {
"name": "Application Review",
"id": 4138250002
},
"credited_to": null,
"candidate_id": 15973199002,
"applied_at": "2019-02-15T01:44:05.399Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": {
"address": "New York, New York, United States"
},
"last_activity_at": "2019-03-08T15:10:09.886Z",
"jobs": [
{
"name": "Product Manager — Growth",
"id": 4114086002
}
],
"id": 17595215002,
"current_stage": {
"name": "Phone Interview",
"id": 4841984002
},
"credited_to": null,
"candidate_id": 16001558002,
"applied_at": "2019-02-15T20:42:01.408Z",
"answers": []
},
{
"status": "rejected",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": {
"type": {
"name": "We rejected them",
"id": 4000000002
},
"name": "Lacking skill(s)/qualification(s)",
"id": 4000002002
},
"rejection_details": {},
"rejected_at": "2019-03-08T02:36:03.530Z",
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": {
"address": "Berkeley, California, United States"
},
"last_activity_at": "2019-03-08T02:36:03.584Z",
"jobs": [
{
"name": "Corporate Counsel",
"id": 4164900002
}
],
"id": 17732605002,
"current_stage": {
"name": "Phone Interview",
"id": 5221849002
},
"credited_to": null,
"candidate_id": 16091577002,
"applied_at": "2019-02-18T06:12:04.672Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T18:18:55.989Z",
"jobs": [
{
"name": "Corporate Counsel",
"id": 4164900002
}
],
"id": 17777009002,
"current_stage": {
"name": "Face to Face",
"id": 5221850002
},
"credited_to": null,
"candidate_id": 16134582002,
"applied_at": "2019-02-19T02:26:06.452Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "ZenSourcer",
"id": 4000173002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T15:10:16.650Z",
"jobs": [
{
"name": "Software Engineer — Blockchain",
"id": 4018550002
}
],
"id": 17875779002,
"current_stage": {
"name": "Phone Interview",
"id": 4138252002
},
"credited_to": {
"name": "Zack Skelly",
"last_name": "Skelly",
"id": 4069870002,
"first_name": "Zack",
"employee_id": null
},
"candidate_id": 16224818002,
"applied_at": "2019-02-20T18:55:14.798Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "ZenSourcer",
"id": 4000173002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T15:10:15.360Z",
"jobs": [
{
"name": "Product Designer",
"id": 4111307002
}
],
"id": 17914377002,
"current_stage": {
"name": "Preliminary Phone Screen",
"id": 4820398002
},
"credited_to": {
"name": "Chris Kalani",
"last_name": "Kalani",
"id": 4147082002,
"first_name": "Chris",
"employee_id": null
},
"candidate_id": 16261232002,
"applied_at": "2019-02-21T05:52:50.097Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "ZenSourcer",
"id": 4000173002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": null,
"last_activity_at": "2019-03-08T15:10:12.746Z",
"jobs": [
{
"name": "Product Designer",
"id": 4111307002
}
],
"id": 17944911002,
"current_stage": {
"name": "Phone Interview",
"id": 4820399002
},
"credited_to": {
"name": "Zack Skelly",
"last_name": "Skelly",
"id": 4069870002,
"first_name": "Zack",
"employee_id": null
},
"candidate_id": 16290422002,
"applied_at": "2019-02-21T21:26:17.865Z",
"answers": []
},
{
"status": "active",
"source": {
"public_name": "Jobs page on your website",
"id": 4000015002
},
"rejection_reason": null,
"rejection_details": null,
"rejected_at": null,
"prospect_detail": {
"prospect_stage": null,
"prospect_pool": null,
"prospect_owner": null
},
"prospect": false,
"location": {
"address": "Atlanta, Georgia, United States"
},
"last_activity_at": "2019-03-08T15:10:11.691Z",
"jobs": [
{
"name": "Data Engineer",
"id": 4194568002
}
],
"id": 17956360002,
"current_stage": {
"name": "Outreach",
"id": 5448217002
},
"credited_to": null,
"candidate_id": 16301491002,
"applied_at": "2019-02-22T03:09:02.181Z",
"answers": [
{
"question": "How did you hear about this job?",
"answer": null
},
{
"question": "Website",
"answer": null
},
{
"question": "GitHub",
"answer": null
},
{
"question": "LinkedIn Profile",
"answer": null
}
]
}
]

View File

@@ -0,0 +1,286 @@
import { GreenhouseApplication } from '../../../src/entities';
// Expected output of the Greenhouse parser when run over the raw API fixture
// in api_v1_applications.json. Each entry flattens the nested response objects
// (source, credited_to, current_stage) into the scalar columns of the
// GreenhouseApplication entity; API fields that are null in the raw response
// (e.g. rejected_at, credited_to) map to `undefined` here.
// NOTE(review): values are transcribed from the JSON fixture — keep the two
// files in sync if either is regenerated.
const ParsedApplications: GreenhouseApplication[] = [
    {
        id: 7510796002,
        candidate_id: 6977554002,
        applied_at: new Date('2018-05-03T17:51:36.660Z'),
        rejected_at: new Date('2018-05-17T07:00:00.000Z'),
        last_activity_at: new Date('2019-03-08T00:32:00.138Z'),
        source_id: 4000058002,
        source_name: 'AngelList',
        credited_to_id: 4069870002,
        credited_to_name: 'Zack Skelly',
        status: 'rejected',
        current_stage_id: 4138361002,
        current_stage_name: 'Phone Interview',
    },
    {
        id: 14858220002,
        candidate_id: 13419271002,
        applied_at: new Date('2018-12-11T17:17:06.473Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T15:10:02.636Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'active',
        current_stage_id: 4138271002,
        current_stage_name: 'Outreach',
    },
    {
        id: 14984854002,
        candidate_id: 16301491002,
        applied_at: new Date('2018-12-15T02:19:02.807Z'),
        rejected_at: new Date('2019-01-09T22:23:35.136Z'),
        last_activity_at: new Date('2019-03-08T15:10:11.691Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'rejected',
        current_stage_id: 4393404002,
        current_stage_name: 'Application Review',
    },
    {
        id: 15719204002,
        candidate_id: 14231312002,
        applied_at: new Date('2019-01-10T00:25:20.613Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T01:58:37.267Z'),
        source_id: 4000173002,
        source_name: 'ZenSourcer',
        credited_to_id: 4069870002,
        credited_to_name: 'Zack Skelly',
        status: 'active',
        current_stage_id: 5221852002,
        current_stage_name: 'Debrief',
    },
    {
        id: 16178464002,
        candidate_id: 14662768002,
        applied_at: new Date('2019-01-21T19:13:05.665Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T15:10:10.534Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'active',
        current_stage_id: 4841984002,
        current_stage_name: 'Phone Interview',
    },
    {
        id: 16987221002,
        candidate_id: 15425402002,
        applied_at: new Date('2019-02-05T05:50:46.504Z'),
        rejected_at: new Date('2019-03-07T18:41:29.967Z'),
        last_activity_at: new Date('2019-03-08T15:10:09.636Z'),
        source_id: 4000173002,
        source_name: 'ZenSourcer',
        credited_to_id: 4069870002,
        credited_to_name: 'Zack Skelly',
        status: 'rejected',
        current_stage_id: 4138252002,
        current_stage_name: 'Phone Interview',
    },
    {
        id: 17110646002,
        candidate_id: 15546609002,
        applied_at: new Date('2019-02-07T02:15:21.653Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T01:07:48.511Z'),
        source_id: 4000173002,
        source_name: 'ZenSourcer',
        credited_to_id: 4243712002,
        credited_to_name: 'Marc Savino',
        status: 'active',
        current_stage_id: 4138280002,
        current_stage_name: 'Take Home Test',
    },
    {
        id: 17248048002,
        candidate_id: 15678079002,
        applied_at: new Date('2019-02-10T22:18:05.091Z'),
        rejected_at: new Date('2019-03-01T01:18:27.114Z'),
        last_activity_at: new Date('2019-03-08T11:15:26.517Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'rejected',
        current_stage_id: 4841984002,
        current_stage_name: 'Phone Interview',
    },
    {
        id: 17289149002,
        candidate_id: 15717915002,
        applied_at: new Date('2019-02-11T21:25:51.243Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T01:40:50.107Z'),
        source_id: 4008626002,
        source_name: 'Recurse',
        credited_to_id: 4069870002,
        credited_to_name: 'Zack Skelly',
        status: 'active',
        current_stage_id: 4138252002,
        current_stage_name: 'Phone Interview',
    },
    {
        id: 17311290002,
        candidate_id: 15739177002,
        applied_at: new Date('2019-02-12T04:19:17.173Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T15:10:19.937Z'),
        source_id: 4000173002,
        source_name: 'ZenSourcer',
        credited_to_id: 4069870002,
        credited_to_name: 'Zack Skelly',
        status: 'active',
        current_stage_id: 5233631002,
        current_stage_name: 'Preliminary Phone Screen',
    },
    {
        id: 17566424002,
        candidate_id: 15973199002,
        applied_at: new Date('2019-02-15T01:43:06.450Z'),
        rejected_at: new Date('2019-02-27T01:27:49.570Z'),
        last_activity_at: new Date('2019-03-08T15:10:12.111Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'rejected',
        current_stage_id: 4393404002,
        current_stage_name: 'Application Review',
    },
    {
        id: 17566430002,
        candidate_id: 15973199002,
        applied_at: new Date('2019-02-15T01:43:07.628Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T15:10:12.111Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'active',
        current_stage_id: 5233629002,
        current_stage_name: 'Outreach',
    },
    {
        id: 17566459002,
        candidate_id: 15973199002,
        applied_at: new Date('2019-02-15T01:44:05.399Z'),
        rejected_at: new Date('2019-02-21T01:31:35.276Z'),
        last_activity_at: new Date('2019-03-08T15:10:12.111Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'rejected',
        current_stage_id: 4138250002,
        current_stage_name: 'Application Review',
    },
    {
        id: 17595215002,
        candidate_id: 16001558002,
        applied_at: new Date('2019-02-15T20:42:01.408Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T15:10:09.886Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'active',
        current_stage_id: 4841984002,
        current_stage_name: 'Phone Interview',
    },
    {
        id: 17732605002,
        candidate_id: 16091577002,
        applied_at: new Date('2019-02-18T06:12:04.672Z'),
        rejected_at: new Date('2019-03-08T02:36:03.530Z'),
        last_activity_at: new Date('2019-03-08T02:36:03.584Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'rejected',
        current_stage_id: 5221849002,
        current_stage_name: 'Phone Interview',
    },
    {
        id: 17777009002,
        candidate_id: 16134582002,
        applied_at: new Date('2019-02-19T02:26:06.452Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T18:18:55.989Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'active',
        current_stage_id: 5221850002,
        current_stage_name: 'Face to Face',
    },
    {
        id: 17875779002,
        candidate_id: 16224818002,
        applied_at: new Date('2019-02-20T18:55:14.798Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T15:10:16.650Z'),
        source_id: 4000173002,
        source_name: 'ZenSourcer',
        credited_to_id: 4069870002,
        credited_to_name: 'Zack Skelly',
        status: 'active',
        current_stage_id: 4138252002,
        current_stage_name: 'Phone Interview',
    },
    {
        id: 17914377002,
        candidate_id: 16261232002,
        applied_at: new Date('2019-02-21T05:52:50.097Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T15:10:15.360Z'),
        source_id: 4000173002,
        source_name: 'ZenSourcer',
        credited_to_id: 4147082002,
        credited_to_name: 'Chris Kalani',
        status: 'active',
        current_stage_id: 4820398002,
        current_stage_name: 'Preliminary Phone Screen',
    },
    {
        id: 17944911002,
        candidate_id: 16290422002,
        applied_at: new Date('2019-02-21T21:26:17.865Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T15:10:12.746Z'),
        source_id: 4000173002,
        source_name: 'ZenSourcer',
        credited_to_id: 4069870002,
        credited_to_name: 'Zack Skelly',
        status: 'active',
        current_stage_id: 4820399002,
        current_stage_name: 'Phone Interview',
    },
    {
        id: 17956360002,
        candidate_id: 16301491002,
        applied_at: new Date('2019-02-22T03:09:02.181Z'),
        rejected_at: undefined,
        last_activity_at: new Date('2019-03-08T15:10:11.691Z'),
        source_id: 4000015002,
        source_name: 'Jobs page on your website',
        credited_to_id: undefined,
        credited_to_name: undefined,
        status: 'active',
        current_stage_id: 5448217002,
        current_stage_name: 'Outreach',
    },
];
export { ParsedApplications };

View File

@@ -0,0 +1,22 @@
import * as chai from 'chai';
import 'mocha';
import { GreenhouseApplication } from '../../../src/entities';
import { chaiSetup } from '../../utils/chai_setup';
chaiSetup.configure();
const expect = chai.expect;
import { GreenhouseApplicationResponse } from '../../../src/data_sources/greenhouse';
import { parseApplications } from '../../../src/parsers/greenhouse';
import { ParsedApplications } from '../../fixtures/greenhouse/api_v1_applications';
import * as applicationsApiResponse from '../../fixtures/greenhouse/api_v1_applications.json';
// Verifies that raw Greenhouse API application responses are transformed
// into the expected GreenhouseApplication entities defined in the fixture.
describe('Greenhouse parser', () => {
    it('parses API applications responses', () => {
        const rawResponses = applicationsApiResponse as GreenhouseApplicationResponse[];
        const parsed: GreenhouseApplication[] = rawResponses.map(response => parseApplications(response));
        expect(parsed).deep.equal(ParsedApplications);
    });
});

View File

@@ -19,6 +19,7 @@
"./test/fixtures/github/api_v3_forks.json",
"./test/fixtures/github/api_v3_issues.json",
"./test/fixtures/github/api_v3_pulls.json",
        "./test/fixtures/github/api_v3_repo.json",
"./test/fixtures/greenhouse/api_v1_applications.json"
]
}