Merge branch 'development' into feature/contracts-monorepo-7
commit e72742f1f7
@@ -83,6 +83,20 @@ jobs:
             keys:
                 - repo-{{ .Environment.CIRCLE_SHA1 }}
         - run: yarn test:generate_docs:circleci
+    test-pipeline:
+        docker:
+            - image: circleci/node:9
+            - image: postgres:11-alpine
+        working_directory: ~/repo
+        steps:
+            - restore_cache:
+                  keys:
+                      - repo-{{ .Environment.CIRCLE_SHA1 }}
+            - run: ZEROEX_DATA_PIPELINE_TEST_DB_URL='postgresql://postgres@localhost/postgres' yarn wsrun test:circleci @0x/pipeline
+            - save_cache:
+                  key: coverage-pipeline-{{ .Environment.CIRCLE_SHA1 }}
+                  paths:
+                      - ~/repo/packages/pipeline/coverage/lcov.info
     test-rest:
         docker:
            - image: circleci/node:9
@@ -342,6 +356,9 @@ workflows:
            - test-contracts-geth:
                  requires:
                      - build
+           - test-pipeline:
+                 requires:
+                     - build
           - test-rest:
                 requires:
                     - build
.gitignore (vendored, 4 changes)
@@ -11,6 +11,10 @@ pids
 *.seed
 *.pid.lock
+
+# SQLite database files
+*.db
+*.sqlite

 # Directory for instrumented libs generated by jscoverage/JSCover
 lib-cov
README.md (11 changes)
@@ -76,11 +76,12 @@ Visit our [developer portal](https://0xproject.com/docs/order-utils) for a compr

 #### Private Packages

 | Package                                            | Description                                                                        |
-| -------------------------------------------------- | ---------------------------------------------------------------- |
+| -------------------------------------------------- | -------------------------------------------------------------------------------- |
 | [`@0x/contracts`](/contracts/core)                 | 0x protocol solidity smart contracts & tests                                       |
-| [`@0x/testnet-faucets`](/packages/testnet-faucets) | A faucet micro-service that dispenses test ERC20 tokens or Ether                   |
-| [`@0x/website`](/packages/website)                 | 0x website                                                                         |
+| [`@0x/instant`](/packages/instant)                 | A free and flexible way to offer simple crypto purchasing in any app or website.  |
+| [`@0x/testnet-faucets`](/packages/testnet-faucets) | A faucet micro-service that dispenses test ERC20 tokens or Ether                  |
+| [`@0x/website`](/packages/website)                 | 0x website                                                                        |

 ## Usage
@@ -1,4 +1,4 @@
-## Contracts interfaces
+## Contract interfaces

 Smart contract interfaces of the 0x protocol.

@@ -135,7 +135,7 @@ describe('MixinSignatureValidator', () => {

         it('should revert when signature type is unsupported', async () => {
             const unsupportedSignatureType = SignatureType.NSignatureTypes;
-            const unsupportedSignatureHex = '0x' + Buffer.from([unsupportedSignatureType]).toString('hex');
+            const unsupportedSignatureHex = `0x${Buffer.from([unsupportedSignatureType]).toString('hex')}`;
             const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
             return expectContractCallFailedAsync(
                 signatureValidator.publicIsValidSignature.callAsync(
@@ -148,7 +148,7 @@ describe('MixinSignatureValidator', () => {
         });

         it('should revert when SignatureType=Illegal', async () => {
-            const unsupportedSignatureHex = '0x' + Buffer.from([SignatureType.Illegal]).toString('hex');
+            const unsupportedSignatureHex = `0x${Buffer.from([SignatureType.Illegal]).toString('hex')}`;
             const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
             return expectContractCallFailedAsync(
                 signatureValidator.publicIsValidSignature.callAsync(
@@ -161,7 +161,7 @@ describe('MixinSignatureValidator', () => {
         });

         it('should return false when SignatureType=Invalid and signature has a length of zero', async () => {
-            const signatureHex = '0x' + Buffer.from([SignatureType.Invalid]).toString('hex');
+            const signatureHex = `0x${Buffer.from([SignatureType.Invalid]).toString('hex')}`;
             const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
             const isValidSignature = await signatureValidator.publicIsValidSignature.callAsync(
                 orderHashHex,

@@ -229,7 +229,7 @@ export class ExchangeWrapper {
         return orderEpoch;
     }
     public async getOrderInfoAsync(signedOrder: SignedOrder): Promise<OrderInfo> {
-        const orderInfo = (await this._exchange.getOrderInfo.callAsync(signedOrder)) as OrderInfo;
+        const orderInfo = await this._exchange.getOrderInfo.callAsync(signedOrder);
         return orderInfo;
     }
     public async getOrdersInfoAsync(signedOrders: SignedOrder[]): Promise<OrderInfo[]> {
contracts/test-utils/CHANGELOG.json (new file, 11 lines)
@@ -0,0 +1,11 @@
+[
+    {
+        "timestamp": 1544482891,
+        "version": "1.0.1",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    }
+]
contracts/test-utils/CHANGELOG.md (new file, 10 lines)
@@ -0,0 +1,10 @@
+<!--
+changelogUtils.file is auto-generated using the monorepo-scripts package. Don't edit directly.
+Edit the package's CHANGELOG.json file only.
+-->
+
+CHANGELOG
+
+## v1.0.1 - _December 10, 2018_
+
+* Dependencies updated
@@ -137,7 +137,7 @@ export async function expectTransactionFailedWithoutReasonAsync(p: sendTransacti
         // directly.
         txReceiptStatus = result.status;
     } else {
-        throw new Error('Unexpected result type: ' + typeof result);
+        throw new Error(`Unexpected result type: ${typeof result}`);
     }
     expect(_.toString(txReceiptStatus)).to.equal(
         '0',

@@ -25,7 +25,7 @@ const blockchainLifecycle = new BlockchainLifecycle(web3Wrapper);

 // BUG: Ideally we would use Buffer.from(memory).toString('hex')
 // https://github.com/Microsoft/TypeScript/issues/23155
-const toHex = (buf: Uint8Array): string => buf.reduce((a, v) => a + ('00' + v.toString(16)).slice(-2), '0x');
+const toHex = (buf: Uint8Array): string => buf.reduce((a, v) => a + `00${v.toString(16)}`.slice(-2), '0x');

 const fromHex = (str: string): Uint8Array => Uint8Array.from(Buffer.from(str.slice(2), 'hex'));
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "2.0.7",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1543401373,
         "version": "2.0.6",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v2.0.7 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v2.0.6 - _November 28, 2018_

 * Dependencies updated
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "2.0.1",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "version": "2.0.0",
         "changes": [

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v2.0.1 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v2.0.0 - _November 28, 2018_

 * Update Exchange artifact to receive ZRX asset data as a constructor argument (#1309)
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "1.0.18",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1542821676,
         "version": "1.0.17",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v1.0.18 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v1.0.17 - _November 21, 2018_

 * Dependencies updated
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "1.0.19",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1542821676,
         "version": "1.0.18",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v1.0.19 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v1.0.18 - _November 21, 2018_

 * Dependencies updated
@@ -5,7 +5,8 @@
             {
                 "note": "Update SRA order provider to include Dai"
             }
-        ]
+        ],
+        "timestamp": 1544482891
     },
     {
         "timestamp": 1543401373,

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v3.0.3 - _December 10, 2018_
+
+* Update SRA order provider to include Dai
+
 ## v3.0.2 - _November 28, 2018_

 * Dependencies updated
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "3.0.9",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1543401373,
         "version": "3.0.8",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v3.0.9 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v3.0.8 - _November 28, 2018_

 * Dependencies updated
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "3.0.9",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1543401373,
         "version": "3.0.8",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v3.0.9 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v3.0.8 - _November 28, 2018_

 * Dependencies updated
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "4.1.2",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1543401373,
         "version": "4.1.1",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v4.1.2 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v4.1.1 - _November 28, 2018_

 * Dependencies updated
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "1.0.20",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1543401373,
         "version": "1.0.19",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v1.0.20 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v1.0.19 - _November 28, 2018_

 * Dependencies updated
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "1.1.3",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "version": "1.1.2",
         "changes": [

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v1.1.3 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v1.1.2 - _November 9, 2018_

 * Dependencies updated
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "1.0.15",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1543401373,
         "version": "1.0.14",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v1.0.15 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v1.0.14 - _November 28, 2018_

 * Dependencies updated
packages/instant/.DS_Store (vendored, binary file not shown)
@@ -1,5 +1,9 @@
 ## @0x/instant

+## Integration
+
+Looking to integrate 0x Instant into your web application or site? Check out the dedicated [instant documentation](https://0xproject.com/wiki#Get-Started-With-Instant) to get started. The documentation covers instant and related topics in depth. For a more "drag and drop" experience, check out our [configurator tool](https://0xproject.com/instant#configure). For on demand developer support, join our [Discord](https://discordapp.com/invite/d3FTX3M).
+
 ## Installation

 The package is available as a UMD module named `zeroExInstant` at https://instant.0xproject.com/instant.js.
@@ -10,7 +10,7 @@
     "scripts": {
         "build": "webpack --mode production",
         "build:ci": "yarn build",
-        "dev": "webpack-dev-server --mode development",
+        "dev": "dotenv webpack-dev-server -- --mode development",
         "lint": "tslint --format stylish --project .",
         "test": "jest",
         "test:coverage": "jest --coverage",
@@ -24,10 +24,7 @@
     },
     "config": {
         "postpublish": {
-            "assets": [
-                "packages/instant/umd/instant.js",
-                "packages/instant/umd/instant.js.map"
-            ]
+            "assets": ["packages/instant/umd/instant.js", "packages/instant/umd/instant.js.map"]
         }
     },
     "repository": {
@@ -15,6 +15,10 @@ input {
     height: 100px;
 }

+input::-webkit-input-placeholder {
+    color: #b4b4b4 !important;
+}
+
 div {
     padding: 3px;
 }
@@ -7,7 +7,6 @@ import { analytics } from '../util/analytics';
 import { assetUtils } from '../util/asset';

 import { SearchInput } from './search_input';
-
 import { Circle } from './ui/circle';
 import { Container } from './ui/container';
 import { Flex } from './ui/flex';
@@ -123,10 +122,20 @@ interface TokenSelectorRowIconProps {
     token: ERC20Asset;
 }

+const getTokenIcon = (symbol: string): React.StatelessComponent | undefined => {
+    try {
+        return require(`../assets/icons/${symbol}.svg`) as React.StatelessComponent;
+    } catch (e) {
+        // Can't find icon
+        return undefined;
+    }
+};
+
 const TokenSelectorRowIcon: React.StatelessComponent<TokenSelectorRowIconProps> = props => {
     const { token } = props;
     const iconUrlIfExists = token.metaData.iconUrl;
-    const TokenIcon = require(`../assets/icons/${token.metaData.symbol}.svg`);
+    const TokenIcon = getTokenIcon(token.metaData.symbol);
     const displaySymbol = assetUtils.bestNameForAsset(token);
     if (!_.isUndefined(iconUrlIfExists)) {
         return <img src={iconUrlIfExists} />;
@@ -61,12 +61,19 @@ export class InstantHeading extends React.Component<InstantHeadingProps, {}> {
     }

     private _renderAmountsSection(): React.ReactNode {
-        return (
-            <Container>
-                <Container marginBottom="5px">{this._renderPlaceholderOrAmount(this._renderEthAmount)}</Container>
-                <Container opacity={0.7}>{this._renderPlaceholderOrAmount(this._renderDollarAmount)}</Container>
-            </Container>
-        );
+        if (
+            _.isUndefined(this.props.totalEthBaseUnitAmount) &&
+            this.props.quoteRequestState !== AsyncProcessState.Pending
+        ) {
+            return null;
+        } else {
+            return (
+                <Container>
+                    <Container marginBottom="5px">{this._renderPlaceholderOrAmount(this._renderEthAmount)}</Container>
+                    <Container opacity={0.7}>{this._renderPlaceholderOrAmount(this._renderDollarAmount)}</Container>
+                </Container>
+            );
+        }
     }

     private _renderIcon(): React.ReactNode {
@@ -29,8 +29,8 @@ export const Input =
     outline: none;
     border: none;
     &::placeholder {
-        color: ${props => props.theme[props.fontColor || 'white']};
-        opacity: 0.5;
+        color: ${props => props.theme[props.fontColor || 'white']} !important;
+        opacity: 0.5 !important;
     }
 }
`;
@@ -1,6 +1,4 @@
-import { ObjectMap } from '@0x/types';
 import { BigNumber } from '@0x/utils';
-import { Provider } from 'ethereum-types';
 import * as _ from 'lodash';
 import * as React from 'react';
 import { Provider as ReduxProvider } from 'react-redux';
@@ -11,7 +9,7 @@ import { asyncData } from '../redux/async_data';
 import { DEFAULT_STATE, DefaultState, State } from '../redux/reducer';
 import { store, Store } from '../redux/store';
 import { fonts } from '../style/fonts';
-import { AccountState, AffiliateInfo, AssetMetaData, Network, OrderSource, QuoteFetchOrigin } from '../types';
+import { AccountState, Network, QuoteFetchOrigin, ZeroExInstantBaseConfig } from '../types';
 import { analytics, disableAnalytics } from '../util/analytics';
 import { assetUtils } from '../util/asset';
 import { errorFlasher } from '../util/error_flasher';
|
|||||||
import { generateAccountHeartbeater, generateBuyQuoteHeartbeater } from '../util/heartbeater_factory';
|
import { generateAccountHeartbeater, generateBuyQuoteHeartbeater } from '../util/heartbeater_factory';
|
||||||
import { providerStateFactory } from '../util/provider_state_factory';
|
import { providerStateFactory } from '../util/provider_state_factory';
|
||||||
|
|
||||||
export type ZeroExInstantProviderProps = ZeroExInstantProviderRequiredProps &
|
export type ZeroExInstantProviderProps = ZeroExInstantBaseConfig;
|
||||||
Partial<ZeroExInstantProviderOptionalProps>;
|
|
||||||
|
|
||||||
export interface ZeroExInstantProviderRequiredProps {
|
|
||||||
orderSource: OrderSource;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ZeroExInstantProviderOptionalProps {
|
|
||||||
provider: Provider;
|
|
||||||
walletDisplayName: string;
|
|
||||||
availableAssetDatas: string[];
|
|
||||||
defaultAssetBuyAmount: number;
|
|
||||||
defaultSelectedAssetData: string;
|
|
||||||
additionalAssetMetaDataMap: ObjectMap<AssetMetaData>;
|
|
||||||
networkId: Network;
|
|
||||||
affiliateInfo: AffiliateInfo;
|
|
||||||
shouldDisableAnalyticsTracking: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class ZeroExInstantProvider extends React.Component<ZeroExInstantProviderProps> {
|
export class ZeroExInstantProvider extends React.Component<ZeroExInstantProviderProps> {
|
||||||
private readonly _store: Store;
|
private readonly _store: Store;
|
||||||
@ -57,10 +38,12 @@ export class ZeroExInstantProvider extends React.Component<ZeroExInstantProvider
|
|||||||
props.orderSource,
|
props.orderSource,
|
||||||
networkId,
|
networkId,
|
||||||
props.provider,
|
props.provider,
|
||||||
|
props.walletDisplayName,
|
||||||
);
|
);
|
||||||
// merge the additional additionalAssetMetaDataMap with our default map
|
// merge the additional additionalAssetMetaDataMap with our default map
|
||||||
const completeAssetMetaDataMap = {
|
const completeAssetMetaDataMap = {
|
||||||
...props.additionalAssetMetaDataMap,
|
// Make sure the passed in assetDatas are lower case
|
||||||
|
..._.mapKeys(props.additionalAssetMetaDataMap || {}, (value, key) => key.toLowerCase()),
|
||||||
...defaultState.assetMetaDataMap,
|
...defaultState.assetMetaDataMap,
|
||||||
};
|
};
|
||||||
// construct the final state
|
// construct the final state
|
||||||
|
@@ -15,6 +15,7 @@ export const GWEI_IN_WEI = new BigNumber(1000000000);
 export const ONE_SECOND_MS = 1000;
 export const ONE_MINUTE_MS = ONE_SECOND_MS * 60;
 export const GIT_SHA = process.env.GIT_SHA;
+export const NODE_ENV = process.env.NODE_ENV;
 export const NPM_PACKAGE_VERSION = process.env.NPM_PACKAGE_VERSION;
 export const ACCOUNT_UPDATE_INTERVAL_TIME_MS = ONE_SECOND_MS * 5;
 export const BUY_QUOTE_UPDATE_INTERVAL_TIME_MS = ONE_SECOND_MS * 15;
@@ -28,14 +29,12 @@ export const HEAP_ENABLED = process.env.HEAP_ENABLED;
 export const COINBASE_API_BASE_URL = 'https://api.coinbase.com/v2';
 export const PROGRESS_STALL_AT_WIDTH = '95%';
 export const PROGRESS_FINISH_ANIMATION_TIME_MS = 200;
-export const HOST_DOMAINS = [
+export const HOST_DOMAINS_EXTERNAL = [
     '0x-instant-staging.s3-website-us-east-1.amazonaws.com',
     '0x-instant-dogfood.s3-website-us-east-1.amazonaws.com',
-    'localhost',
-    '127.0.0.1',
-    '0.0.0.0',
     'instant.0xproject.com',
 ];
+export const HOST_DOMAINS_LOCAL = ['localhost', '127.0.0.1', '0.0.0.0'];
 export const ROLLBAR_CLIENT_TOKEN = process.env.ROLLBAR_CLIENT_TOKEN;
 export const ROLLBAR_ENABLED = process.env.ROLLBAR_ENABLED;
 export const INSTANT_DISCHARGE_TARGET = process.env.INSTANT_DISCHARGE_TARGET as
@@ -58,7 +58,7 @@ const mergeProps = (
     ...ownProps,
     network: connectedState.network,
     account: connectedState.providerState.account,
-    walletDisplayName: connectedState.walletDisplayName || connectedState.providerState.name,
+    walletDisplayName: connectedState.providerState.displayName,
     onUnlockWalletClick: () => connectedDispatch.unlockWalletAndDispatchToStore(connectedState.providerState),
     onInstallWalletClick: () => {
         const isMobile = envUtil.isMobileOperatingSystem();
@@ -102,6 +102,7 @@ export interface AffiliateInfo {

 export interface ProviderState {
     name: string;
+    displayName: string;
     provider: Provider;
     assetBuyer: AssetBuyer;
     web3Wrapper: Web3Wrapper;
@@ -177,3 +178,21 @@ export enum ProviderType {
     Cipher = 'CIPHER',
     Fallback = 'FALLBACK',
 }
+
+export interface ZeroExInstantRequiredBaseConfig {
+    orderSource: OrderSource;
+}
+
+export interface ZeroExInstantOptionalBaseConfig {
+    provider: Provider;
+    walletDisplayName: string;
+    availableAssetDatas: string[];
+    defaultAssetBuyAmount: number;
+    defaultSelectedAssetData: string;
+    additionalAssetMetaDataMap: ObjectMap<AssetMetaData>;
+    networkId: Network;
+    affiliateInfo: AffiliateInfo;
+    shouldDisableAnalyticsTracking: boolean;
+}
+
+export type ZeroExInstantBaseConfig = ZeroExInstantRequiredBaseConfig & Partial<ZeroExInstantOptionalBaseConfig>;
@@ -2,7 +2,7 @@ import { BuyQuote } from '@0x/asset-buyer';
 import { BigNumber } from '@0x/utils';
 import * as _ from 'lodash';

-import { GIT_SHA, HEAP_ENABLED, INSTANT_DISCHARGE_TARGET, NPM_PACKAGE_VERSION } from '../constants';
+import { GIT_SHA, HEAP_ENABLED, INSTANT_DISCHARGE_TARGET, NODE_ENV, NPM_PACKAGE_VERSION } from '../constants';
 import {
     AffiliateInfo,
     Asset,
@@ -106,6 +106,7 @@ export interface AnalyticsEventOptions {
     ethAddress?: string;
     networkId?: number;
     providerName?: string;
+    providerDisplayName?: string;
     gitSha?: string;
     npmVersion?: string;
     instantEnvironment?: string;
@@ -149,6 +150,7 @@ export const analytics = {
         embeddedUrl: window.location.href,
         networkId: network,
         providerName: providerState.name,
+        providerDisplayName: providerState.displayName,
         gitSha: GIT_SHA,
         npmVersion: NPM_PACKAGE_VERSION,
         orderSource: orderSourceName,
@@ -156,7 +158,7 @@ export const analytics = {
         affiliateFeePercent,
         selectedAssetName: selectedAsset ? selectedAsset.metaData.name : 'none',
         selectedAssetData: selectedAsset ? selectedAsset.assetData : 'none',
-        instantEnvironment: INSTANT_DISCHARGE_TARGET || `Local ${process.env.NODE_ENV}`,
+        instantEnvironment: INSTANT_DISCHARGE_TARGET || `Local ${NODE_ENV}`,
     };
     return eventOptions;
 },
@@ -26,7 +26,7 @@ export const assetUtils = {
             return;
         }
         return {
-            assetData,
+            assetData: assetData.toLowerCase(),
             metaData,
         };
     },
@@ -36,7 +36,7 @@ export const assetUtils = {
         network: Network,
     ): Asset => {
         return {
-            assetData,
+            assetData: assetData.toLowerCase(),
             metaData: assetUtils.getMetaDataOrThrow(assetData, assetMetaDataMap, network),
         };
     },
@@ -38,14 +38,11 @@ export const buyQuoteUpdater = {
         } catch (error) {
             const errorMessage = assetUtils.assetBuyerErrorMessage(asset, error);

-            if (_.isUndefined(errorMessage)) {
-                // This is an unknown error, report it to rollbar
-                errorReporter.report(error);
-            }
+            errorReporter.report(error);
+            analytics.trackQuoteError(error.message ? error.message : 'other', baseUnitValue, fetchOrigin);

             if (options.dispatchErrors) {
                 dispatch(actions.setQuoteRequestStateFailure());
-                analytics.trackQuoteError(error.message ? error.message : 'other', baseUnitValue, fetchOrigin);
                 errorFlasher.flashNewErrorMessage(dispatch, errorMessage || 'Error fetching price, please try again');
             }
             return;
@@ -62,4 +62,11 @@ export const envUtil = {
         }
         return PROVIDER_TYPE_TO_NAME[providerTypeIfExists];
     },
+    getProviderDisplayName(provider: Provider): string {
+        const providerTypeIfExists = envUtil.getProviderType(provider);
+        if (_.isUndefined(providerTypeIfExists)) {
+            return 'Wallet';
+        }
+        return PROVIDER_TYPE_TO_NAME[providerTypeIfExists];
+    },
 };
@@ -1,17 +1,34 @@
 import { logUtils } from '@0x/utils';
 import * as _ from 'lodash';

-import { GIT_SHA, HOST_DOMAINS, INSTANT_DISCHARGE_TARGET, ROLLBAR_CLIENT_TOKEN, ROLLBAR_ENABLED } from '../constants';
+import {
+    GIT_SHA,
+    HOST_DOMAINS_EXTERNAL,
+    HOST_DOMAINS_LOCAL,
+    INSTANT_DISCHARGE_TARGET,
+    NODE_ENV,
+    ROLLBAR_CLIENT_TOKEN,
+    ROLLBAR_ENABLED,
+} from '../constants';

 // Import version of Rollbar designed for embedded components
 // See https://docs.rollbar.com/docs/using-rollbarjs-inside-an-embedded-component
 // tslint:disable-next-line:no-var-requires
 const Rollbar = require('rollbar/dist/rollbar.noconflict.umd');

+const getRollbarHostDomains = (): string[] => {
+    if (NODE_ENV === 'development') {
+        return HOST_DOMAINS_EXTERNAL.concat(HOST_DOMAINS_LOCAL);
+    } else {
+        return HOST_DOMAINS_EXTERNAL;
+    }
+};
+
 let rollbar: any;
 // Configures rollbar and sets up error catching
 export const setupRollbar = (): any => {
     if (_.isUndefined(rollbar) && ROLLBAR_CLIENT_TOKEN && ROLLBAR_ENABLED) {
+        const hostDomains = getRollbarHostDomains();
         rollbar = new Rollbar({
             accessToken: ROLLBAR_CLIENT_TOKEN,
             captureUncaught: true,
@@ -20,7 +37,7 @@ export const setupRollbar = (): any => {
             itemsPerMinute: 10,
             maxItems: 500,
             payload: {
-                environment: INSTANT_DISCHARGE_TARGET || `Local ${process.env.NODE_ENV}`,
+                environment: INSTANT_DISCHARGE_TARGET || `Local ${NODE_ENV}`,
                 client: {
                     javascript: {
                         source_map_enabled: true,
@@ -29,7 +46,7 @@ export const setupRollbar = (): any => {
                 },
             },
         },
-        hostWhiteList: HOST_DOMAINS,
+        hostWhiteList: hostDomains,
         uncaughtErrorLevel: 'error',
         ignoredMessages: [
             // Errors from the third-party scripts
@@ -10,27 +10,40 @@ import { assetBuyerFactory } from './asset_buyer_factory';
 import { providerFactory } from './provider_factory';

 export const providerStateFactory = {
-    getInitialProviderState: (orderSource: OrderSource, network: Network, provider?: Provider): ProviderState => {
+    getInitialProviderState: (
+        orderSource: OrderSource,
+        network: Network,
+        provider?: Provider,
+        walletDisplayName?: string,
+    ): ProviderState => {
         if (!_.isUndefined(provider)) {
-            return providerStateFactory.getInitialProviderStateFromProvider(orderSource, network, provider);
+            return providerStateFactory.getInitialProviderStateFromProvider(
+                orderSource,
+                network,
+                provider,
+                walletDisplayName,
+            );
         }
         const providerStateFromWindowIfExits = providerStateFactory.getInitialProviderStateFromWindowIfExists(
             orderSource,
             network,
+            walletDisplayName,
         );
         if (providerStateFromWindowIfExits) {
            return providerStateFromWindowIfExits;
         } else {
-            return providerStateFactory.getInitialProviderStateFallback(orderSource, network);
+            return providerStateFactory.getInitialProviderStateFallback(orderSource, network, walletDisplayName);
         }
     },
     getInitialProviderStateFromProvider: (
         orderSource: OrderSource,
         network: Network,
         provider: Provider,
+        walletDisplayName?: string,
     ): ProviderState => {
         const providerState: ProviderState = {
             name: envUtil.getProviderName(provider),
+            displayName: walletDisplayName || envUtil.getProviderDisplayName(provider),
             provider,
             web3Wrapper: new Web3Wrapper(provider),
             assetBuyer: assetBuyerFactory.getAssetBuyer(provider, orderSource, network),
@@ -38,11 +51,16 @@ export const providerStateFactory = {
         };
         return providerState;
     },
-    getInitialProviderStateFromWindowIfExists: (orderSource: OrderSource, network: Network): Maybe<ProviderState> => {
+    getInitialProviderStateFromWindowIfExists: (
+        orderSource: OrderSource,
+        network: Network,
+        walletDisplayName?: string,
+    ): Maybe<ProviderState> => {
         const injectedProviderIfExists = providerFactory.getInjectedProviderIfExists();
         if (!_.isUndefined(injectedProviderIfExists)) {
             const providerState: ProviderState = {
                 name: envUtil.getProviderName(injectedProviderIfExists),
+                displayName: walletDisplayName || envUtil.getProviderDisplayName(injectedProviderIfExists),
                 provider: injectedProviderIfExists,
                 web3Wrapper: new Web3Wrapper(injectedProviderIfExists),
                 assetBuyer: assetBuyerFactory.getAssetBuyer(injectedProviderIfExists, orderSource, network),
@@ -53,10 +71,15 @@ export const providerStateFactory = {
             return undefined;
         }
     },
-    getInitialProviderStateFallback: (orderSource: OrderSource, network: Network): ProviderState => {
+    getInitialProviderStateFallback: (
+        orderSource: OrderSource,
+        network: Network,
+        walletDisplayName?: string,
+    ): ProviderState => {
         const provider = providerFactory.getFallbackNoSigningProvider(network);
         const providerState: ProviderState = {
             name: 'Fallback',
+            displayName: walletDisplayName || envUtil.getProviderDisplayName(provider),
             provider,
             web3Wrapper: new Web3Wrapper(provider),
             assetBuyer: assetBuyerFactory.getAssetBuyer(provider, orderSource, network),
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "2.1.3",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1542821676,
         "version": "2.1.2",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v2.1.3 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v2.1.2 - _November 21, 2018_

 * Dependencies updated
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "2.2.1",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "version": "2.2.0",
         "changes": [

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v2.2.1 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v2.2.0 - _November 28, 2018_

 * Add CLI `0x-migrate` for running the 0x migrations in a language-agnostic way (#1324)
@@ -98,7 +98,7 @@ async function testInstallPackageAsync(
     const lastChangelogVersion = JSON.parse(fs.readFileSync(changelogPath).toString())[0].version;
     const packageName = installablePackage.packageJson.name;
     utils.log(`Testing ${packageName}@${lastChangelogVersion}`);
-    const packageDirName = path.join(...(packageName + '-test').split('/'));
+    const packageDirName = path.join(...`${packageName}-test`.split('/'));
     // NOTE(fabio): The `testDirectory` needs to be somewhere **outside** the monorepo root directory.
     // Otherwise, it will have access to the hoisted `node_modules` directory and the Typescript missing
     // type errors will not be caught.
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "3.0.5",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1543401373,
         "version": "3.0.4",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v3.0.5 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v3.0.4 - _November 28, 2018_

 * Dependencies updated
@@ -70,7 +70,7 @@ describe('Order hashing', () => {
         });
         it('returns true if order hash is correct', () => {
             const orderHashLength = 65;
-            const isValid = orderHashUtils.isValidOrderHash('0x' + Array(orderHashLength).join('0'));
+            const isValid = orderHashUtils.isValidOrderHash(`0x${Array(orderHashLength).join('0')}`);
             expect(isValid).to.be.true();
         });
     });
@@ -1,4 +1,13 @@
 [
+    {
+        "timestamp": 1544482891,
+        "version": "2.2.7",
+        "changes": [
+            {
+                "note": "Dependencies updated"
+            }
+        ]
+    },
     {
         "timestamp": 1543401373,
         "version": "2.2.6",

@@ -5,6 +5,10 @@ Edit the package's CHANGELOG.json file only.

 CHANGELOG

+## v2.2.7 - _December 10, 2018_
+
+* Dependencies updated
+
 ## v2.2.6 - _November 28, 2018_

 * Dependencies updated
packages/pipeline/.npmignore (new file, 7 lines)
@@ -0,0 +1,7 @@
+.*
+yarn-error.log
+/scripts/
+/generated_docs/
+/src/
+tsconfig.json
+/lib/monorepo_scripts/
packages/pipeline/README.md (new file, 166 lines)
@@ -0,0 +1,166 @@
+## @0xproject/pipeline
+
+This repository contains scripts used for scraping data from the Ethereum blockchain into SQL tables for analysis by the 0x team.
+
+## Contributing
+
+We strongly recommend that the community help us make improvements and determine the future direction of the protocol. To report bugs within this package, please create an issue in this repository.
+
+Please read our [contribution guidelines](../../CONTRIBUTING.md) before getting started.
+
+### Install dependencies:
+
+```bash
+yarn install
+```
+
+### Build
+
+```bash
+yarn build
+```
+
+### Clean
+
+```bash
+yarn clean
+```
+
+### Lint
+
+```bash
+yarn lint
+```
+
+### Migrations
+
+Create a new migration: `yarn migrate:create --name MigrationNameInCamelCase`
+Run migrations: `yarn migrate:run`
+Revert the most recent migration (CAUTION: may result in data loss!): `yarn migrate:revert`
+
+## Testing
+
+There are several test scripts in **package.json**. You can run all the tests
+with `yarn test:all` or run certain tests seprately by following the
+instructions below. Some tests may not work out of the box on certain platforms
+or operating systems (see the "Database tests" section below).
+
+### Unit tests
+
+The unit tests can be run with `yarn test`. These tests don't depend on any
+services or databases and will run in any environment that can run Node.
+
+### Database tests
+
+Database integration tests can be run with `yarn test:db`. These tests will
+attempt to automatically spin up a Postgres database via Docker. If this doesn't
+work you have two other options:
+
+1. Set the `DOCKER_SOCKET` environment variable to a valid socket path to use
+   for communicating with Docker.
+2. Start Postgres manually and set the `ZEROEX_DATA_PIPELINE_TEST_DB_URL`
+   environment variable. If this is set, the tests will use your existing
+   Postgres database instead of trying to create one with Docker.
+
+## Running locally
+
+`pipeline` requires access to a PostgreSQL database. The easiest way to start
+Postgres is via Docker. Depending on your platform, you may need to prepend
+`sudo` to the following command:
+
+```
+docker run --rm -d -p 5432:5432 --name pipeline_postgres postgres:11-alpine
+```
+
+This will start a Postgres server with the default username and database name
+(`postgres` and `postgres`). You should set the environment variable as follows:
+
+```
+export ZEROEX_DATA_PIPELINE_DB_URL=postgresql://postgres@localhost/postgres
+```
+
+First thing you will need to do is run the migrations:
+
+```
+yarn migrate:run
+```
+
+Now you can run scripts locally:
+
+```
+node packages/pipeline/lib/src/scripts/pull_radar_relay_orders.js
+```
+
+To stop the Postgres server (you may need to add `sudo`):
+
+```
+docker stop pipeline_postgres
+```
+
+This will remove all data from the database.
+
+If you prefer, you can also install Postgres with e.g.,
+[Homebrew](https://wiki.postgresql.org/wiki/Homebrew) or
+[Postgress.app](https://postgresapp.com/). Keep in mind that you will need to
+set the`ZEROEX_DATA_PIPELINE_DB_URL` environment variable to a valid
+[PostgreSQL connection url](https://stackoverflow.com/questions/3582552/postgresql-connection-url)
+
+## Directory structure
+
+```
+.
+├── lib: Code generated by the TypeScript compiler. Don't edit this directly.
+├── migrations: Code for creating and updating database schemas.
+├── node_modules:
+├── src: All TypeScript source code.
+│   ├── data_sources: Code responsible for getting raw data, typically from a third-party source.
+│   ├── entities: TypeORM entities which closely mirror our database schemas. Some other ORMs call these "models".
+│   ├── parsers: Code for converting raw data into entities.
+│   ├── scripts: Executable scripts which put all the pieces together.
+│   └── utils: Various utils used across packages/files.
+├── test: All tests go here and are organized in the same way as the folder/file that they test.
+```
+
+## Adding new data to the pipeline
+
+1. Create an entity in the _entities_ directory. Entities directly mirror our
+   database schemas. We follow the practice of having "dumb" entities, so
+   entity classes should typically not have any methods.
+2. Create a migration using the `yarn migrate:create` command. Create/update
+   tables as needed. Remember to fill in both the `up` and `down` methods. Try
+   to avoid data loss as much as possible in your migrations.
+3. Add basic tests for your entity and migrations to the **test/entities/**
+   directory.
+4. Create a class or function in the **data_sources/** directory for getting
+   raw data. This code should abstract away pagination and rate-limiting as
+   much as possible.
+5. Create a class or function in the **parsers/** directory for converting the
+   raw data into an entity. Also add tests in the **tests/** directory to test
+   the parser.
+6. Create an executable script in the **scripts/** directory for putting
+   everything together. Your script can accept environment variables for things
+   like API keys. It should pull the data, parse it, and save it to the
+   database. Scripts should be idempotent and atomic (when possible). What this
+   means is that your script may be responsible for determining _which_ data
+   needs to be updated. For example, you may need to query the database to find
+   the most recent block number that we have already pulled, then pull new data
+   starting from that block number.
+7. Run the migrations and then run your new script locally and verify it works
+   as expected.
+
+#### Additional guidelines and tips:
+
+* Table names should be plural and separated by underscores (e.g.,
+  `exchange_fill_events`).
+* Any table which contains data which comes directly from a third-party source
+  should be namespaced in the `raw` PostgreSQL schema.
+* Column names in the database should be separated by underscores (e.g.,
+  `maker_asset_type`).
+* Field names in entity classes (like any other fields in TypeScript) should
+  be camel-cased (e.g., `makerAssetType`).
+* All timestamps should be stored as milliseconds since the Unix Epoch.
+* Use the `BigNumber` type for TypeScript code which deals with 256-bit
+  numbers from smart contracts or for any case where we are dealing with large
+  floating point numbers.
+* [TypeORM documentation](http://typeorm.io/#/) is pretty robust and can be a
+  helpful resource.
packages/pipeline/coverage/.gitkeep (new file, empty)
packages/pipeline/migrations/1542070840010-InitialSchema.ts (new file, 187 lines)
@@ -0,0 +1,187 @@
+import { MigrationInterface, QueryRunner, Table } from 'typeorm';
+
+const blocks = new Table({
+    name: 'raw.blocks',
+    columns: [
+        { name: 'number', type: 'bigint', isPrimary: true },
+        { name: 'hash', type: 'varchar', isPrimary: true },
+        { name: 'timestamp', type: 'bigint' },
+    ],
+});
+
+const exchange_cancel_events = new Table({
+    name: 'raw.exchange_cancel_events',
+    columns: [
+        { name: 'contract_address', type: 'char(42)', isPrimary: true },
+        { name: 'log_index', type: 'integer', isPrimary: true },
+        { name: 'block_number', type: 'bigint', isPrimary: true },
+
+        { name: 'raw_data', type: 'varchar' },
+
+        { name: 'transaction_hash', type: 'varchar' },
+        { name: 'maker_address', type: 'char(42)' },
+        { name: 'taker_address', type: 'char(42)' },
+        { name: 'fee_recipient_address', type: 'char(42)' },
+        { name: 'sender_address', type: 'char(42)' },
+        { name: 'order_hash', type: 'varchar' },
+
+        { name: 'raw_maker_asset_data', type: 'varchar' },
+        { name: 'maker_asset_type', type: 'varchar' },
+        { name: 'maker_asset_proxy_id', type: 'varchar' },
+        { name: 'maker_token_address', type: 'char(42)' },
+        { name: 'maker_token_id', type: 'varchar', isNullable: true },
+        { name: 'raw_taker_asset_data', type: 'varchar' },
+        { name: 'taker_asset_type', type: 'varchar' },
+        { name: 'taker_asset_proxy_id', type: 'varchar' },
+        { name: 'taker_token_address', type: 'char(42)' },
+        { name: 'taker_token_id', type: 'varchar', isNullable: true },
+    ],
+});
+
+const exchange_cancel_up_to_events = new Table({
+    name: 'raw.exchange_cancel_up_to_events',
+    columns: [
+        { name: 'contract_address', type: 'char(42)', isPrimary: true },
+        { name: 'log_index', type: 'integer', isPrimary: true },
+        { name: 'block_number', type: 'bigint', isPrimary: true },
+
+        { name: 'raw_data', type: 'varchar' },
+
+        { name: 'transaction_hash', type: 'varchar' },
+        { name: 'maker_address', type: 'char(42)' },
+        { name: 'sender_address', type: 'char(42)' },
+        { name: 'order_epoch', type: 'varchar' },
+    ],
+});
+
+const exchange_fill_events = new Table({
+    name: 'raw.exchange_fill_events',
+    columns: [
+        { name: 'contract_address', type: 'char(42)', isPrimary: true },
+        { name: 'log_index', type: 'integer', isPrimary: true },
+        { name: 'block_number', type: 'bigint', isPrimary: true },
+
+        { name: 'raw_data', type: 'varchar' },
+
+        { name: 'transaction_hash', type: 'varchar' },
+        { name: 'maker_address', type: 'char(42)' },
+        { name: 'taker_address', type: 'char(42)' },
+        { name: 'fee_recipient_address', type: 'char(42)' },
+        { name: 'sender_address', type: 'char(42)' },
+        { name: 'maker_asset_filled_amount', type: 'varchar' },
+        { name: 'taker_asset_filled_amount', type: 'varchar' },
+        { name: 'maker_fee_paid', type: 'varchar' },
+        { name: 'taker_fee_paid', type: 'varchar' },
+        { name: 'order_hash', type: 'varchar' },
+
+        { name: 'raw_maker_asset_data', type: 'varchar' },
|
||||||
|
{ name: 'maker_asset_type', type: 'varchar' },
|
||||||
|
{ name: 'maker_asset_proxy_id', type: 'varchar' },
|
||||||
|
{ name: 'maker_token_address', type: 'char(42)' },
|
||||||
|
{ name: 'maker_token_id', type: 'varchar', isNullable: true },
|
||||||
|
{ name: 'raw_taker_asset_data', type: 'varchar' },
|
||||||
|
{ name: 'taker_asset_type', type: 'varchar' },
|
||||||
|
{ name: 'taker_asset_proxy_id', type: 'varchar' },
|
||||||
|
{ name: 'taker_token_address', type: 'char(42)' },
|
||||||
|
{ name: 'taker_token_id', type: 'varchar', isNullable: true },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const relayers = new Table({
|
||||||
|
name: 'raw.relayers',
|
||||||
|
columns: [
|
||||||
|
{ name: 'uuid', type: 'varchar', isPrimary: true },
|
||||||
|
{ name: 'name', type: 'varchar' },
|
||||||
|
{ name: 'sra_http_endpoint', type: 'varchar', isNullable: true },
|
||||||
|
{ name: 'sra_ws_endpoint', type: 'varchar', isNullable: true },
|
||||||
|
{ name: 'app_url', type: 'varchar', isNullable: true },
|
||||||
|
{ name: 'fee_recipient_addresses', type: 'char(42)', isArray: true },
|
||||||
|
{ name: 'taker_addresses', type: 'char(42)', isArray: true },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const sra_orders = new Table({
|
||||||
|
name: 'raw.sra_orders',
|
||||||
|
columns: [
|
||||||
|
{ name: 'exchange_address', type: 'char(42)', isPrimary: true },
|
||||||
|
{ name: 'order_hash_hex', type: 'varchar', isPrimary: true },
|
||||||
|
|
||||||
|
{ name: 'source_url', type: 'varchar' },
|
||||||
|
{ name: 'last_updated_timestamp', type: 'bigint' },
|
||||||
|
{ name: 'first_seen_timestamp', type: 'bigint' },
|
||||||
|
|
||||||
|
{ name: 'maker_address', type: 'char(42)' },
|
||||||
|
{ name: 'taker_address', type: 'char(42)' },
|
||||||
|
{ name: 'fee_recipient_address', type: 'char(42)' },
|
||||||
|
{ name: 'sender_address', type: 'char(42)' },
|
||||||
|
{ name: 'maker_asset_filled_amount', type: 'varchar' },
|
||||||
|
{ name: 'taker_asset_filled_amount', type: 'varchar' },
|
||||||
|
{ name: 'maker_fee', type: 'varchar' },
|
||||||
|
{ name: 'taker_fee', type: 'varchar' },
|
||||||
|
{ name: 'expiration_time_seconds', type: 'int' },
|
||||||
|
{ name: 'salt', type: 'varchar' },
|
||||||
|
{ name: 'signature', type: 'varchar' },
|
||||||
|
|
||||||
|
{ name: 'raw_maker_asset_data', type: 'varchar' },
|
||||||
|
{ name: 'maker_asset_type', type: 'varchar' },
|
||||||
|
{ name: 'maker_asset_proxy_id', type: 'varchar' },
|
||||||
|
{ name: 'maker_token_address', type: 'char(42)' },
|
||||||
|
{ name: 'maker_token_id', type: 'varchar', isNullable: true },
|
||||||
|
{ name: 'raw_taker_asset_data', type: 'varchar' },
|
||||||
|
{ name: 'taker_asset_type', type: 'varchar' },
|
||||||
|
{ name: 'taker_asset_proxy_id', type: 'varchar' },
|
||||||
|
{ name: 'taker_token_address', type: 'char(42)' },
|
||||||
|
{ name: 'taker_token_id', type: 'varchar', isNullable: true },
|
||||||
|
|
||||||
|
{ name: 'metadata_json', type: 'varchar' },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const token_on_chain_metadata = new Table({
|
||||||
|
name: 'raw.token_on_chain_metadata',
|
||||||
|
columns: [
|
||||||
|
{ name: 'address', type: 'char(42)', isPrimary: true },
|
||||||
|
{ name: 'decimals', type: 'integer' },
|
||||||
|
{ name: 'symbol', type: 'varchar' },
|
||||||
|
{ name: 'name', type: 'varchar' },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const transactions = new Table({
|
||||||
|
name: 'raw.transactions',
|
||||||
|
columns: [
|
||||||
|
{ name: 'block_number', type: 'bigint', isPrimary: true },
|
||||||
|
{ name: 'block_hash', type: 'varchar', isPrimary: true },
|
||||||
|
{ name: 'transaction_hash', type: 'varchar', isPrimary: true },
|
||||||
|
{ name: 'gas_used', type: 'bigint' },
|
||||||
|
{ name: 'gas_price', type: 'bigint' },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
export class InitialSchema1542070840010 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.createSchema('raw');
|
||||||
|
|
||||||
|
await queryRunner.createTable(blocks);
|
||||||
|
await queryRunner.createTable(exchange_cancel_events);
|
||||||
|
await queryRunner.createTable(exchange_cancel_up_to_events);
|
||||||
|
await queryRunner.createTable(exchange_fill_events);
|
||||||
|
await queryRunner.createTable(relayers);
|
||||||
|
await queryRunner.createTable(sra_orders);
|
||||||
|
await queryRunner.createTable(token_on_chain_metadata);
|
||||||
|
await queryRunner.createTable(transactions);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.dropTable(blocks.name);
|
||||||
|
await queryRunner.dropTable(exchange_cancel_events.name);
|
||||||
|
await queryRunner.dropTable(exchange_cancel_up_to_events.name);
|
||||||
|
await queryRunner.dropTable(exchange_fill_events.name);
|
||||||
|
await queryRunner.dropTable(relayers.name);
|
||||||
|
await queryRunner.dropTable(sra_orders.name);
|
||||||
|
await queryRunner.dropTable(token_on_chain_metadata.name);
|
||||||
|
await queryRunner.dropTable(transactions.name);
|
||||||
|
|
||||||
|
await queryRunner.dropSchema('raw');
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,48 @@
|
|||||||
|
import { MigrationInterface, QueryRunner, Table } from 'typeorm';
|
||||||
|
|
||||||
|
export class NewSraOrderTimestampFormat1542147915364 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.sra_orders
|
||||||
|
DROP CONSTRAINT "PK_09bfb9980715329563bd53d667e",
|
||||||
|
ADD PRIMARY KEY (order_hash_hex, exchange_address, source_url);
|
||||||
|
`,
|
||||||
|
);
|
||||||
|
|
||||||
|
await queryRunner.query(
|
||||||
|
`CREATE TABLE raw.sra_orders_observed_timestamps (
|
||||||
|
order_hash_hex varchar NOT NULL,
|
||||||
|
exchange_address varchar NOT NULL,
|
||||||
|
source_url varchar NOT NULL,
|
||||||
|
observed_timestamp bigint NOT NULL,
|
||||||
|
FOREIGN KEY
|
||||||
|
(order_hash_hex, exchange_address, source_url)
|
||||||
|
REFERENCES raw.sra_orders (order_hash_hex, exchange_address, source_url),
|
||||||
|
PRIMARY KEY (order_hash_hex, exchange_address, source_url, observed_timestamp)
|
||||||
|
);`,
|
||||||
|
);
|
||||||
|
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.sra_orders
|
||||||
|
DROP COLUMN last_updated_timestamp,
|
||||||
|
DROP COLUMN first_seen_timestamp;`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.dropTable('raw.sra_orders_observed_timestamps');
|
||||||
|
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.sra_orders
|
||||||
|
ADD COLUMN last_updated_timestamp bigint NOT NULL DEFAULT 0,
|
||||||
|
ADD COLUMN first_seen_timestamp bigint NOT NULL DEFAULT 0;`,
|
||||||
|
);
|
||||||
|
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.sra_orders
|
||||||
|
DROP CONSTRAINT sra_orders_pkey,
|
||||||
|
ADD CONSTRAINT "PK_09bfb9980715329563bd53d667e" PRIMARY KEY ("exchange_address", "order_hash_hex");
|
||||||
|
`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,13 @@
|
|||||||
|
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||||
|
|
||||||
|
export class RenameSraOrdersFilledAmounts1542152278484 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.renameColumn('raw.sra_orders', 'maker_asset_filled_amount', 'maker_asset_amount');
|
||||||
|
await queryRunner.renameColumn('raw.sra_orders', 'taker_asset_filled_amount', 'taker_asset_amount');
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.renameColumn('raw.sra_orders', 'maker_asset_amount', 'maker_asset_filled_amount');
|
||||||
|
await queryRunner.renameColumn('raw.sra_orders', 'taker_asset_amount', 'taker_asset_filled_amount');
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,53 @@
|
|||||||
|
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||||
|
|
||||||
|
export class ConvertBigNumberToNumeric1542234704666 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.exchange_fill_events
|
||||||
|
ALTER COLUMN maker_asset_filled_amount TYPE numeric USING maker_asset_filled_amount::numeric,
|
||||||
|
ALTER COLUMN taker_asset_filled_amount TYPE numeric USING taker_asset_filled_amount::numeric,
|
||||||
|
ALTER COLUMN maker_fee_paid TYPE numeric USING maker_fee_paid::numeric,
|
||||||
|
ALTER COLUMN taker_fee_paid TYPE numeric USING taker_fee_paid::numeric;`,
|
||||||
|
);
|
||||||
|
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.exchange_cancel_up_to_events
|
||||||
|
ALTER COLUMN order_epoch TYPE numeric USING order_epoch::numeric;`,
|
||||||
|
);
|
||||||
|
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.sra_orders
|
||||||
|
ALTER COLUMN maker_asset_amount TYPE numeric USING maker_asset_amount::numeric,
|
||||||
|
ALTER COLUMN taker_asset_amount TYPE numeric USING taker_asset_amount::numeric,
|
||||||
|
ALTER COLUMN maker_fee TYPE numeric USING maker_fee::numeric,
|
||||||
|
ALTER COLUMN taker_fee TYPE numeric USING taker_fee::numeric,
|
||||||
|
ALTER COLUMN expiration_time_seconds TYPE numeric USING expiration_time_seconds::numeric,
|
||||||
|
ALTER COLUMN salt TYPE numeric USING salt::numeric;`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.sra_orders
|
||||||
|
ALTER COLUMN maker_asset_amount TYPE varchar USING maker_asset_amount::varchar,
|
||||||
|
ALTER COLUMN taker_asset_amount TYPE varchar USING taker_asset_amount::varchar,
|
||||||
|
ALTER COLUMN maker_fee TYPE varchar USING maker_fee::varchar,
|
||||||
|
ALTER COLUMN taker_fee TYPE varchar USING taker_fee::varchar,
|
||||||
|
ALTER COLUMN expiration_time_seconds TYPE varchar USING expiration_time_seconds::varchar,
|
||||||
|
ALTER COLUMN salt TYPE varchar USING salt::varchar;`,
|
||||||
|
);
|
||||||
|
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.exchange_cancel_up_to_events
|
||||||
|
ALTER COLUMN order_epoch TYPE varchar USING order_epoch::varchar;`,
|
||||||
|
);
|
||||||
|
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.exchange_fill_events
|
||||||
|
ALTER COLUMN maker_asset_filled_amount TYPE varchar USING maker_asset_filled_amount::varchar,
|
||||||
|
ALTER COLUMN taker_asset_filled_amount TYPE varchar USING taker_asset_filled_amount::varchar,
|
||||||
|
ALTER COLUMN maker_fee_paid TYPE varchar USING maker_fee_paid::varchar,
|
||||||
|
ALTER COLUMN taker_fee_paid TYPE varchar USING taker_fee_paid::varchar;`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,14 @@
|
|||||||
|
import { MigrationInterface, QueryRunner, TableColumn } from 'typeorm';
|
||||||
|
|
||||||
|
export class AddHomepageUrlToRelayers1542249766882 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.addColumn(
|
||||||
|
'raw.relayers',
|
||||||
|
new TableColumn({ name: 'homepage_url', type: 'varchar', default: `'unknown'` }),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.dropColumn('raw.relayers', 'homepage_url');
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,17 @@
|
|||||||
|
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||||
|
|
||||||
|
export class MakeTakerAddressNullable1542401122477 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.exchange_cancel_events
|
||||||
|
ALTER COLUMN taker_address DROP NOT NULL;`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.exchange_cancel_events
|
||||||
|
ALTER COLUMN taker_address SET NOT NULL;`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,60 @@
|
|||||||
|
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||||
|
|
||||||
|
export class NewMetadataAndOHLCVTables1542655823221 implements MigrationInterface {
|
||||||
|
// tslint:disable-next-line
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(`
|
||||||
|
CREATE TABLE raw.token_metadata (
|
||||||
|
address VARCHAR NOT NULL,
|
||||||
|
authority VARCHAR NOT NULL,
|
||||||
|
decimals INT NULL,
|
||||||
|
symbol VARCHAR NULL,
|
||||||
|
name VARCHAR NULL,
|
||||||
|
|
||||||
|
PRIMARY KEY (address, authority)
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
|
||||||
|
await queryRunner.dropTable('raw.token_on_chain_metadata');
|
||||||
|
|
||||||
|
await queryRunner.query(`
|
||||||
|
CREATE TABLE raw.ohlcv_external (
|
||||||
|
exchange VARCHAR NOT NULL,
|
||||||
|
from_symbol VARCHAR NOT NULL,
|
||||||
|
to_symbol VARCHAR NOT NULL,
|
||||||
|
start_time BIGINT NOT NULL,
|
||||||
|
end_time BIGINT NOT NULL,
|
||||||
|
|
||||||
|
open DOUBLE PRECISION NOT NULL,
|
||||||
|
close DOUBLE PRECISION NOT NULL,
|
||||||
|
low DOUBLE PRECISION NOT NULL,
|
||||||
|
high DOUBLE PRECISION NOT NULL,
|
||||||
|
volume_from DOUBLE PRECISION NOT NULL,
|
||||||
|
volume_to DOUBLE PRECISION NOT NULL,
|
||||||
|
|
||||||
|
source VARCHAR NOT NULL,
|
||||||
|
observed_timestamp BIGINT NOT NULL,
|
||||||
|
|
||||||
|
PRIMARY KEY (exchange, from_symbol, to_symbol, start_time, end_time, source, observed_timestamp)
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// tslint:disable-next-line
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(`
|
||||||
|
CREATE TABLE raw.token_on_chain_metadata (
|
||||||
|
address VARCHAR NOT NULL,
|
||||||
|
decimals INT NULL,
|
||||||
|
symbol VARCHAR NULL,
|
||||||
|
name VARCHAR NULL,
|
||||||
|
|
||||||
|
PRIMARY KEY (address)
|
||||||
|
);
|
||||||
|
`);
|
||||||
|
|
||||||
|
await queryRunner.dropTable('raw.token_metadata');
|
||||||
|
|
||||||
|
await queryRunner.dropTable('raw.ohlcv_external');
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,30 @@
|
|||||||
|
import { MigrationInterface, QueryRunner, Table } from 'typeorm';
|
||||||
|
|
||||||
|
const tokenOrderbookSnapshots = new Table({
|
||||||
|
name: 'raw.token_orderbook_snapshots',
|
||||||
|
columns: [
|
||||||
|
{ name: 'observed_timestamp', type: 'bigint', isPrimary: true },
|
||||||
|
{ name: 'source', type: 'varchar', isPrimary: true },
|
||||||
|
{ name: 'order_type', type: 'order_t' },
|
||||||
|
{ name: 'price', type: 'numeric', isPrimary: true },
|
||||||
|
|
||||||
|
{ name: 'base_asset_symbol', type: 'varchar', isPrimary: true },
|
||||||
|
{ name: 'base_asset_address', type: 'char(42)' },
|
||||||
|
{ name: 'base_volume', type: 'numeric' },
|
||||||
|
|
||||||
|
{ name: 'quote_asset_symbol', type: 'varchar', isPrimary: true },
|
||||||
|
{ name: 'quote_asset_address', type: 'char(42)' },
|
||||||
|
{ name: 'quote_volume', type: 'numeric' },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
export class TokenOrderbookSnapshots1543434472116 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(`CREATE TYPE order_t AS enum('bid', 'ask');`);
|
||||||
|
await queryRunner.createTable(tokenOrderbookSnapshots);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.dropTable(tokenOrderbookSnapshots.name);
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,41 @@
|
|||||||
|
import { MigrationInterface, QueryRunner, Table } from 'typeorm';
|
||||||
|
|
||||||
|
const dexTrades = new Table({
|
||||||
|
name: 'raw.dex_trades',
|
||||||
|
columns: [
|
||||||
|
{ name: 'source_url', type: 'varchar', isPrimary: true },
|
||||||
|
{ name: 'tx_hash', type: 'varchar', isPrimary: true },
|
||||||
|
|
||||||
|
{ name: 'tx_timestamp', type: 'bigint' },
|
||||||
|
{ name: 'tx_date', type: 'varchar' },
|
||||||
|
{ name: 'tx_sender', type: 'varchar(42)' },
|
||||||
|
{ name: 'smart_contract_id', type: 'bigint' },
|
||||||
|
{ name: 'smart_contract_address', type: 'varchar(42)' },
|
||||||
|
{ name: 'contract_type', type: 'varchar' },
|
||||||
|
{ name: 'maker', type: 'varchar(42)' },
|
||||||
|
{ name: 'taker', type: 'varchar(42)' },
|
||||||
|
{ name: 'amount_buy', type: 'numeric' },
|
||||||
|
{ name: 'maker_fee_amount', type: 'numeric' },
|
||||||
|
{ name: 'buy_currency_id', type: 'bigint' },
|
||||||
|
{ name: 'buy_symbol', type: 'varchar' },
|
||||||
|
{ name: 'amount_sell', type: 'numeric' },
|
||||||
|
{ name: 'taker_fee_amount', type: 'numeric' },
|
||||||
|
{ name: 'sell_currency_id', type: 'bigint' },
|
||||||
|
{ name: 'sell_symbol', type: 'varchar' },
|
||||||
|
{ name: 'maker_annotation', type: 'varchar' },
|
||||||
|
{ name: 'taker_annotation', type: 'varchar' },
|
||||||
|
{ name: 'protocol', type: 'varchar' },
|
||||||
|
{ name: 'buy_address', type: 'varchar(42)', isNullable: true },
|
||||||
|
{ name: 'sell_address', type: 'varchar(42)', isNullable: true },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
export class CreateDexTrades1543446690436 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.createTable(dexTrades);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.dropTable(dexTrades);
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,17 @@
|
|||||||
|
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||||
|
|
||||||
|
export class ConvertTokenMetadataDecimalsToBigNumber1543980079179 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.token_metadata
|
||||||
|
ALTER COLUMN decimals TYPE numeric USING decimals::numeric;`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.token_metadata
|
||||||
|
ALTER COLUMN decimals TYPE numeric USING decimals::integer;`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,19 @@
|
|||||||
|
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||||
|
|
||||||
|
export class ConvertTransactionGasPriceToBigNumber1543983324954 implements MigrationInterface {
|
||||||
|
public async up(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.transactions
|
||||||
|
ALTER COLUMN gas_price TYPE numeric USING gas_price::numeric,
|
||||||
|
ALTER COLUMN gas_used TYPE numeric USING gas_used::numeric;`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async down(queryRunner: QueryRunner): Promise<any> {
|
||||||
|
await queryRunner.query(
|
||||||
|
`ALTER TABLE raw.transactions
|
||||||
|
ALTER COLUMN gas_price TYPE numeric USING gas_price::bigint,
|
||||||
|
ALTER COLUMN gas_used TYPE numeric USING gas_used::bigint;`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
65
packages/pipeline/package.json
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
{
|
||||||
|
"name": "@0x/pipeline",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"private": true,
|
||||||
|
"description": "Data pipeline for offline analysis",
|
||||||
|
"scripts": {
|
||||||
|
"build": "yarn tsc -b",
|
||||||
|
"build:ci": "yarn build",
|
||||||
|
"test": "yarn run_mocha",
|
||||||
|
"rebuild_and_test": "run-s build test:all",
|
||||||
|
"test:db": "yarn run_mocha:db",
|
||||||
|
"test:all": "run-s test test:db",
|
||||||
|
"test:circleci": "yarn test:coverage",
|
||||||
|
"run_mocha": "mocha --require source-map-support/register --require make-promises-safe 'lib/test/!(entities)/**/*_test.js' --bail --exit",
|
||||||
|
"run_mocha:db": "mocha --require source-map-support/register --require make-promises-safe lib/test/db_global_hooks.js 'lib/test/entities/*_test.js' --bail --exit --timeout 60000",
|
||||||
|
"test:coverage": "nyc npm run test:all --all && yarn coverage:report:lcov",
|
||||||
|
"coverage:report:lcov": "nyc report --reporter=text-lcov > coverage/lcov.info",
|
||||||
|
"clean": "shx rm -rf lib",
|
||||||
|
"lint": "tslint --project . --format stylish --exclude ./migrations/**/*",
|
||||||
|
"migrate:run": "yarn typeorm migration:run --config ./lib/src/ormconfig",
|
||||||
|
"migrate:revert": "yarn typeorm migration:revert --config ./lib/src/ormconfig",
|
||||||
|
"migrate:create": "yarn typeorm migration:create --config ./lib/src/ormconfig --dir migrations"
|
||||||
|
},
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/0xProject/0x-monorepo"
|
||||||
|
},
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"devDependencies": {
|
||||||
|
"@0x/tslint-config": "^1.0.9",
|
||||||
|
"@types/axios": "^0.14.0",
|
||||||
|
"@types/ramda": "^0.25.38",
|
||||||
|
"chai": "^4.1.2",
|
||||||
|
"chai-as-promised": "^7.1.1",
|
||||||
|
"chai-bignumber": "^2.0.2",
|
||||||
|
"dirty-chai": "^2.0.1",
|
||||||
|
"mocha": "^5.2.0",
|
||||||
|
"tslint": "5.11.0",
|
||||||
|
"typescript": "3.0.1"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@0x/connect": "^3.0.2",
|
||||||
|
"@0x/contract-artifacts": "^1.0.1",
|
||||||
|
"@0x/contract-wrappers": "^3.0.0",
|
||||||
|
"@0x/dev-utils": "^1.0.13",
|
||||||
|
"@0x/order-utils": "^2.0.0",
|
||||||
|
"@0x/subproviders": "^2.1.0",
|
||||||
|
"@0x/types": "^1.2.0",
|
||||||
|
"@0x/utils": "^2.0.3",
|
||||||
|
"@0x/web3-wrapper": "^3.1.0",
|
||||||
|
"@types/dockerode": "^2.5.9",
|
||||||
|
"@types/p-limit": "^2.0.0",
|
||||||
|
"async-parallel": "^1.2.3",
|
||||||
|
"axios": "^0.18.0",
|
||||||
|
"bottleneck": "^2.13.2",
|
||||||
|
"dockerode": "^2.5.7",
|
||||||
|
"ethereum-types": "^1.0.6",
|
||||||
|
"pg": "^7.5.0",
|
||||||
|
"prettier": "^1.15.3",
|
||||||
|
"ramda": "^0.25.0",
|
||||||
|
"reflect-metadata": "^0.1.12",
|
||||||
|
"sqlite3": "^4.0.2",
|
||||||
|
"typeorm": "^0.2.7"
|
||||||
|
}
|
||||||
|
}
|
133
packages/pipeline/src/data_sources/bloxy/index.ts
Normal file
@ -0,0 +1,133 @@
|
|||||||
|
import axios from 'axios';
|
||||||
|
import * as R from 'ramda';
|
||||||
|
|
||||||
|
// URL to use for getting dex trades from Bloxy.
|
||||||
|
export const BLOXY_DEX_TRADES_URL = 'https://bloxy.info/api/dex/trades';
|
||||||
|
// Number of trades to get at once. Must be less than or equal to MAX_OFFSET.
|
||||||
|
const TRADES_PER_QUERY = 10000;
|
||||||
|
// Maximum offset supported by the Bloxy API.
|
||||||
|
const MAX_OFFSET = 100000;
|
||||||
|
// Buffer to subtract from offset. This means we will request some trades twice
|
||||||
|
// but we have less chance on missing out on any data.
|
||||||
|
const OFFSET_BUFFER = 1000;
|
||||||
|
// Maximum number of days supported by the Bloxy API.
|
||||||
|
const MAX_DAYS = 30;
|
||||||
|
// Buffer used for comparing the last seen timestamp to the last returned
|
||||||
|
// timestamp. Increasing this reduces chances of data loss but also creates more
|
||||||
|
// redundancy and can impact performance.
|
||||||
|
// tslint:disable-next-line:custom-no-magic-numbers
|
||||||
|
const LAST_SEEN_TIMESTAMP_BUFFER_MS = 1000 * 60 * 30; // 30 minutes
|
||||||
|
|
||||||
|
// tslint:disable-next-line:custom-no-magic-numbers
|
||||||
|
const millisecondsPerDay = 1000 * 60 * 60 * 24; // ms/d = ms/s * s/m * m/h * h/d
|
||||||
|
|
||||||
|
export interface BloxyTrade {
|
||||||
|
tx_hash: string;
|
||||||
|
tx_time: string;
|
||||||
|
tx_date: string;
|
||||||
|
tx_sender: string;
|
||||||
|
smart_contract_id: number;
|
||||||
|
smart_contract_address: string;
|
||||||
|
contract_type: string;
|
||||||
|
maker: string;
|
||||||
|
taker: string;
|
||||||
|
amountBuy: number;
|
||||||
|
makerFee: number;
|
||||||
|
buyCurrencyId: number;
|
||||||
|
buySymbol: string;
|
||||||
|
amountSell: number;
|
||||||
|
takerFee: number;
|
||||||
|
sellCurrencyId: number;
|
||||||
|
sellSymbol: string;
|
||||||
|
maker_annotation: string;
|
||||||
|
taker_annotation: string;
|
||||||
|
protocol: string;
|
||||||
|
buyAddress: string | null;
|
||||||
|
sellAddress: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface BloxyError {
|
||||||
|
error: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
type BloxyResponse<T> = T | BloxyError;
|
||||||
|
type BloxyTradeResponse = BloxyResponse<BloxyTrade[]>;
|
||||||
|
|
||||||
|
function isError<T>(response: BloxyResponse<T>): response is BloxyError {
|
||||||
|
return (response as BloxyError).error !== undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class BloxySource {
|
||||||
|
private readonly _apiKey: string;
|
||||||
|
|
||||||
|
constructor(apiKey: string) {
|
||||||
|
this._apiKey = apiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets all latest trades between the lastSeenTimestamp (minus some buffer)
|
||||||
|
* and the current time. Note that because the Bloxy API has some hard
|
||||||
|
* limits it might not always be possible to get *all* the trades in the
|
||||||
|
* desired time range.
|
||||||
|
* @param lastSeenTimestamp The latest timestamp for trades that have
|
||||||
|
* already been seen.
|
||||||
|
*/
|
||||||
|
public async getDexTradesAsync(lastSeenTimestamp: number): Promise<BloxyTrade[]> {
|
||||||
|
let allTrades: BloxyTrade[] = [];
|
||||||
|
|
||||||
|
// Clamp numberOfDays so that it is always between 1 and MAX_DAYS (inclusive)
|
||||||
|
const numberOfDays = R.clamp(1, MAX_DAYS, getDaysSinceTimestamp(lastSeenTimestamp));
|
||||||
|
|
||||||
|
// Keep getting trades until we hit one of the following conditions:
|
||||||
|
//
|
||||||
|
// 1. Offset hits MAX_OFFSET (we can't go back any further).
|
||||||
|
// 2. There are no more trades in the response.
|
||||||
|
// 3. We see a tx_time equal to or earlier than lastSeenTimestamp (plus
|
||||||
|
// some buffer).
|
||||||
|
//
|
||||||
|
for (let offset = 0; offset <= MAX_OFFSET; offset += TRADES_PER_QUERY - OFFSET_BUFFER) {
|
||||||
|
const trades = await this._getTradesWithOffsetAsync(numberOfDays, offset);
|
||||||
|
if (trades.length === 0) {
|
||||||
|
// There are no more trades left for the days we are querying.
|
||||||
|
// This means we are done.
|
||||||
|
return filterDuplicateTrades(allTrades);
|
||||||
|
}
|
||||||
|
const sortedTrades = R.reverse(R.sortBy(trade => trade.tx_time, trades));
|
||||||
|
allTrades = allTrades.concat(sortedTrades);
|
||||||
|
|
||||||
|
// Check if lastReturnedTimestamp < lastSeenTimestamp
|
||||||
|
const lastReturnedTimestamp = new Date(sortedTrades[0].tx_time).getTime();
|
||||||
|
if (lastReturnedTimestamp < lastSeenTimestamp - LAST_SEEN_TIMESTAMP_BUFFER_MS) {
|
||||||
|
// We are at the point where we have already seen trades for the
|
||||||
|
// timestamp range that is being returned. We're done.
|
||||||
|
return filterDuplicateTrades(allTrades);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return filterDuplicateTrades(allTrades);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async _getTradesWithOffsetAsync(numberOfDays: number, offset: number): Promise<BloxyTrade[]> {
|
||||||
|
const resp = await axios.get<BloxyTradeResponse>(BLOXY_DEX_TRADES_URL, {
|
||||||
|
params: {
|
||||||
|
key: this._apiKey,
|
||||||
|
days: numberOfDays,
|
||||||
|
limit: TRADES_PER_QUERY,
|
||||||
|
offset,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
if (isError(resp.data)) {
|
||||||
|
throw new Error(`Error in Bloxy API response: ${resp.data.error}`);
|
||||||
|
}
|
||||||
|
return resp.data;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Computes the number of days between the given timestamp and the current
|
||||||
|
// timestamp (rounded up).
|
||||||
|
function getDaysSinceTimestamp(timestamp: number): number {
|
||||||
|
const msSinceTimestamp = Date.now() - timestamp;
|
||||||
|
const daysSinceTimestamp = msSinceTimestamp / millisecondsPerDay;
|
||||||
|
return Math.ceil(daysSinceTimestamp);
|
||||||
|
}
|
||||||
|
|
||||||
|
const filterDuplicateTrades = R.uniqBy((trade: BloxyTrade) => trade.tx_hash);
|
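One possible way to drive this source from a script is sketched below; the environment-variable name, import path, and the 30-day fallback window are assumptions for illustration, not part of this change.

```ts
import { BloxySource } from './data_sources/bloxy';

// ASSUMPTION: env var name and fallback window are hypothetical.
const apiKey = process.env.BLOXY_API_KEY;
if (apiKey === undefined) {
    throw new Error('BLOXY_API_KEY environment variable is not set');
}
const bloxySource = new BloxySource(apiKey);

// In a real script, lastSeenTimestamp would come from a MAX() query on the
// trades already stored in the database.
const THIRTY_DAYS_IN_MS = 30 * 24 * 60 * 60 * 1000;
const lastSeenTimestamp = Date.now() - THIRTY_DAYS_IN_MS;

bloxySource
    .getDexTradesAsync(lastSeenTimestamp)
    .then(trades => {
        // Trades are already deduplicated by tx_hash inside the source.
        console.log(`Fetched ${trades.length} trades from Bloxy`);
    })
    .catch(err => console.error(err));
```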
@ -0,0 +1,85 @@
|
|||||||
|
import {
|
||||||
|
ContractWrappers,
|
||||||
|
ExchangeCancelEventArgs,
|
||||||
|
ExchangeCancelUpToEventArgs,
|
||||||
|
ExchangeEventArgs,
|
||||||
|
ExchangeEvents,
|
||||||
|
ExchangeFillEventArgs,
|
||||||
|
ExchangeWrapper,
|
||||||
|
} from '@0x/contract-wrappers';
|
||||||
|
import { Web3ProviderEngine } from '@0x/subproviders';
|
||||||
|
import { Web3Wrapper } from '@0x/web3-wrapper';
|
||||||
|
import { LogWithDecodedArgs } from 'ethereum-types';
|
||||||
|
|
||||||
|
import { EXCHANGE_START_BLOCK } from '../../utils';
|
||||||
|
|
||||||
|
const BLOCK_FINALITY_THRESHOLD = 10; // When to consider blocks as final. Used to compute default toBlock.
|
||||||
|
const NUM_BLOCKS_PER_QUERY = 20000; // Number of blocks to query for events at a time.
|
||||||
|
|
||||||
|
export class ExchangeEventsSource {
|
||||||
|
private readonly _exchangeWrapper: ExchangeWrapper;
|
||||||
|
private readonly _web3Wrapper: Web3Wrapper;
|
||||||
|
constructor(provider: Web3ProviderEngine, networkId: number) {
|
||||||
|
this._web3Wrapper = new Web3Wrapper(provider);
|
||||||
|
const contractWrappers = new ContractWrappers(provider, { networkId });
|
||||||
|
this._exchangeWrapper = contractWrappers.exchange;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getFillEventsAsync(
|
||||||
|
fromBlock?: number,
|
||||||
|
toBlock?: number,
|
||||||
|
): Promise<Array<LogWithDecodedArgs<ExchangeFillEventArgs>>> {
|
||||||
|
return this._getEventsAsync<ExchangeFillEventArgs>(ExchangeEvents.Fill, fromBlock, toBlock);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getCancelEventsAsync(
|
||||||
|
fromBlock?: number,
|
||||||
|
toBlock?: number,
|
||||||
|
): Promise<Array<LogWithDecodedArgs<ExchangeCancelEventArgs>>> {
|
||||||
|
return this._getEventsAsync<ExchangeCancelEventArgs>(ExchangeEvents.Cancel, fromBlock, toBlock);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getCancelUpToEventsAsync(
|
||||||
|
fromBlock?: number,
|
||||||
|
toBlock?: number,
|
||||||
|
): Promise<Array<LogWithDecodedArgs<ExchangeCancelUpToEventArgs>>> {
|
||||||
|
return this._getEventsAsync<ExchangeCancelUpToEventArgs>(ExchangeEvents.CancelUpTo, fromBlock, toBlock);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async _getEventsAsync<ArgsType extends ExchangeEventArgs>(
|
||||||
|
eventName: ExchangeEvents,
|
||||||
|
fromBlock: number = EXCHANGE_START_BLOCK,
|
||||||
|
toBlock?: number,
|
||||||
|
): Promise<Array<LogWithDecodedArgs<ArgsType>>> {
|
||||||
|
const calculatedToBlock =
|
||||||
|
toBlock === undefined
|
||||||
|
? (await this._web3Wrapper.getBlockNumberAsync()) - BLOCK_FINALITY_THRESHOLD
|
||||||
|
: toBlock;
|
||||||
|
let events: Array<LogWithDecodedArgs<ArgsType>> = [];
|
||||||
|
for (let currFromBlock = fromBlock; currFromBlock <= calculatedToBlock; currFromBlock += NUM_BLOCKS_PER_QUERY) {
|
||||||
|
events = events.concat(
|
||||||
|
await this._getEventsForRangeAsync<ArgsType>(
|
||||||
|
eventName,
|
||||||
|
currFromBlock,
|
||||||
|
Math.min(currFromBlock + NUM_BLOCKS_PER_QUERY - 1, calculatedToBlock),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return events;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async _getEventsForRangeAsync<ArgsType extends ExchangeEventArgs>(
|
||||||
|
eventName: ExchangeEvents,
|
||||||
|
fromBlock: number,
|
||||||
|
toBlock: number,
|
||||||
|
): Promise<Array<LogWithDecodedArgs<ArgsType>>> {
|
||||||
|
return this._exchangeWrapper.getLogsAsync<ArgsType>(
|
||||||
|
eventName,
|
||||||
|
{
|
||||||
|
fromBlock,
|
||||||
|
toBlock,
|
||||||
|
},
|
||||||
|
{},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
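A hedged usage sketch for this events source follows; the RPC env var, mainnet network id, and import path are assumptions for illustration.

```ts
import { RPCSubprovider, Web3ProviderEngine } from '@0x/subproviders';

import { ExchangeEventsSource } from './data_sources/contract-wrappers/exchange_events'; // path assumed

// ASSUMPTION: RPC URL env var and network id 1 (mainnet) are illustrative.
const provider = new Web3ProviderEngine();
provider.addProvider(new RPCSubprovider(process.env.WEB3_RPC_URL as string));
provider.start();

const exchangeEvents = new ExchangeEventsSource(provider, 1);

// With no arguments, the source starts at EXCHANGE_START_BLOCK, stops a few
// blocks behind the chain tip, and pages through 20,000-block ranges internally.
exchangeEvents
    .getFillEventsAsync()
    .then(fillEvents => console.log(`Fetched ${fillEvents.length} Fill events`))
    .catch(err => console.error(err));
```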
78
packages/pipeline/src/data_sources/ddex/index.ts
Normal file
@ -0,0 +1,78 @@
|
|||||||
|
import { fetchAsync, logUtils } from '@0x/utils';
|
||||||
|
|
||||||
|
const DDEX_BASE_URL = 'https://api.ddex.io/v2';
|
||||||
|
const ACTIVE_MARKETS_URL = `${DDEX_BASE_URL}/markets`;
|
||||||
|
const NO_AGGREGATION_LEVEL = 3; // See https://docs.ddex.io/#get-orderbook
|
||||||
|
const ORDERBOOK_ENDPOINT = `/orderbook?level=${NO_AGGREGATION_LEVEL}`;
|
||||||
|
export const DDEX_SOURCE = 'ddex';
|
||||||
|
|
||||||
|
export interface DdexActiveMarketsResponse {
|
||||||
|
status: number;
|
||||||
|
desc: string;
|
||||||
|
data: {
|
||||||
|
markets: DdexMarket[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DdexMarket {
|
||||||
|
id: string;
|
||||||
|
quoteToken: string;
|
||||||
|
quoteTokenDecimals: number;
|
||||||
|
quoteTokenAddress: string;
|
||||||
|
baseToken: string;
|
||||||
|
baseTokenDecimals: number;
|
||||||
|
baseTokenAddress: string;
|
||||||
|
minOrderSize: string;
|
||||||
|
maxOrderSize: string;
|
||||||
|
pricePrecision: number;
|
||||||
|
priceDecimals: number;
|
||||||
|
amountDecimals: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DdexOrderbookResponse {
|
||||||
|
status: number;
|
||||||
|
desc: string;
|
||||||
|
data: {
|
||||||
|
orderBook: DdexOrderbook;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DdexOrderbook {
|
||||||
|
marketId: string;
|
||||||
|
bids: DdexOrder[];
|
||||||
|
asks: DdexOrder[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface DdexOrder {
|
||||||
|
price: string;
|
||||||
|
amount: string;
|
||||||
|
orderId: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// tslint:disable:prefer-function-over-method
|
||||||
|
// ^ Keep consistency with other sources and help logical organization
|
||||||
|
export class DdexSource {
|
||||||
|
/**
|
||||||
|
* Call Ddex API to find out which markets they are maintaining orderbooks for.
|
||||||
|
*/
|
||||||
|
public async getActiveMarketsAsync(): Promise<DdexMarket[]> {
|
||||||
|
logUtils.log('Getting all active DDEX markets');
|
||||||
|
const resp = await fetchAsync(ACTIVE_MARKETS_URL);
|
||||||
|
const respJson: DdexActiveMarketsResponse = await resp.json();
|
||||||
|
const markets = respJson.data.markets;
|
||||||
|
logUtils.log(`Got ${markets.length} markets.`);
|
||||||
|
return markets;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieve orderbook from Ddex API for a given market.
|
||||||
|
* @param marketId String identifying the market we want data for. Eg. 'REP/AUG'
|
||||||
|
*/
|
||||||
|
public async getMarketOrderbookAsync(marketId: string): Promise<DdexOrderbook> {
|
||||||
|
logUtils.log(`${marketId}: Retrieving orderbook.`);
|
||||||
|
const marketOrderbookUrl = `${ACTIVE_MARKETS_URL}/${marketId}${ORDERBOOK_ENDPOINT}`;
|
||||||
|
const resp = await fetchAsync(marketOrderbookUrl);
|
||||||
|
const respJson: DdexOrderbookResponse = await resp.json();
|
||||||
|
return respJson.data.orderBook;
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,110 @@
|
|||||||
|
// tslint:disable:no-duplicate-imports
|
||||||
|
import { fetchAsync } from '@0x/utils';
|
||||||
|
import Bottleneck from 'bottleneck';
|
||||||
|
import { stringify } from 'querystring';
|
||||||
|
import * as R from 'ramda';
|
||||||
|
|
||||||
|
import { TradingPair } from '../../utils/get_ohlcv_trading_pairs';
|
||||||
|
|
||||||
|
export interface CryptoCompareOHLCVResponse {
|
||||||
|
Data: CryptoCompareOHLCVRecord[];
|
||||||
|
Response: string;
|
||||||
|
Message: string;
|
||||||
|
Type: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CryptoCompareOHLCVRecord {
|
||||||
|
time: number; // in seconds, not milliseconds
|
||||||
|
close: number;
|
||||||
|
high: number;
|
||||||
|
low: number;
|
||||||
|
open: number;
|
||||||
|
volumefrom: number;
|
||||||
|
volumeto: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CryptoCompareOHLCVParams {
|
||||||
|
fsym: string;
|
||||||
|
tsym: string;
|
||||||
|
e?: string;
|
||||||
|
aggregate?: string;
|
||||||
|
aggregatePredictableTimePeriods?: boolean;
|
||||||
|
limit?: number;
|
||||||
|
toTs?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ONE_HOUR = 60 * 60 * 1000; // tslint:disable-line:custom-no-magic-numbers
|
||||||
|
const ONE_SECOND = 1000;
|
||||||
|
const ONE_HOUR_AGO = new Date().getTime() - ONE_HOUR;
|
||||||
|
const HTTP_OK_STATUS = 200;
|
||||||
|
const CRYPTO_COMPARE_VALID_EMPTY_RESPONSE_TYPE = 96;
|
||||||
|
const MAX_PAGE_SIZE = 2000;
|
||||||
|
|
||||||
|
export class CryptoCompareOHLCVSource {
|
||||||
|
public readonly intervalBetweenRecords = ONE_HOUR;
|
||||||
|
public readonly defaultExchange = 'CCCAGG';
|
||||||
|
public readonly interval = this.intervalBetweenRecords * MAX_PAGE_SIZE; // the hourly API returns data for one interval at a time
|
||||||
|
private readonly _url: string = 'https://min-api.cryptocompare.com/data/histohour?';
|
||||||
|
|
||||||
|
// rate-limit for all API calls through this class instance
|
||||||
|
private readonly _limiter: Bottleneck;
|
||||||
|
constructor(maxReqsPerSecond: number) {
|
||||||
|
this._limiter = new Bottleneck({
|
||||||
|
minTime: ONE_SECOND / maxReqsPerSecond,
|
||||||
|
reservoir: 30,
|
||||||
|
reservoirRefreshAmount: 30,
|
||||||
|
reservoirRefreshInterval: ONE_SECOND,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// gets OHLCV records starting from pair.latest
|
||||||
|
public async getHourlyOHLCVAsync(pair: TradingPair): Promise<CryptoCompareOHLCVRecord[]> {
|
||||||
|
const params = {
|
||||||
|
e: this.defaultExchange,
|
||||||
|
fsym: pair.fromSymbol,
|
||||||
|
tsym: pair.toSymbol,
|
||||||
|
limit: MAX_PAGE_SIZE,
|
||||||
|
toTs: Math.floor((pair.latestSavedTime + this.interval) / ONE_SECOND), // CryptoCompare uses timestamp in seconds. not ms
|
||||||
|
};
|
||||||
|
const url = this._url + stringify(params);
|
||||||
|
const response = await this._limiter.schedule(() => fetchAsync(url));
|
||||||
|
if (response.status !== HTTP_OK_STATUS) {
|
||||||
|
throw new Error(`HTTP error while scraping Crypto Compare: [${response}]`);
|
||||||
|
}
|
||||||
|
const json: CryptoCompareOHLCVResponse = await response.json();
|
||||||
|
if (
|
||||||
|
(json.Response === 'Error' || json.Data.length === 0) &&
|
||||||
|
json.Type !== CRYPTO_COMPARE_VALID_EMPTY_RESPONSE_TYPE
|
||||||
|
) {
|
||||||
|
throw new Error(JSON.stringify(json));
|
||||||
|
}
|
||||||
|
return json.Data.filter(rec => {
|
||||||
|
return (
|
||||||
|
// Crypto Compare takes ~30 mins to finalise records
|
||||||
|
rec.time * ONE_SECOND < ONE_HOUR_AGO && rec.time * ONE_SECOND > pair.latestSavedTime && hasData(rec)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
public generateBackfillIntervals(pair: TradingPair): TradingPair[] {
|
||||||
|
const now = new Date().getTime();
|
||||||
|
const f = (p: TradingPair): false | [TradingPair, TradingPair] => {
|
||||||
|
if (p.latestSavedTime > now) {
|
||||||
|
return false;
|
||||||
|
} else {
|
||||||
|
return [p, R.merge(p, { latestSavedTime: p.latestSavedTime + this.interval })];
|
||||||
|
}
|
||||||
|
};
|
||||||
|
return R.unfold(f, pair);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasData(record: CryptoCompareOHLCVRecord): boolean {
|
||||||
|
return (
|
||||||
|
record.close !== 0 ||
|
||||||
|
record.open !== 0 ||
|
||||||
|
record.high !== 0 ||
|
||||||
|
record.low !== 0 ||
|
||||||
|
record.volumefrom !== 0 ||
|
||||||
|
record.volumeto !== 0
|
||||||
|
);
|
||||||
|
}
|
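A sketch of how the backfill helper and the hourly fetch might be combined; the trading pair literal, rate limit, and import paths are assumptions, and `TradingPair` is assumed to carry the three fields this source reads.

```ts
import { CryptoCompareOHLCVSource } from './data_sources/ohlcv_external/crypto_compare'; // path assumed
import { TradingPair } from './utils/get_ohlcv_trading_pairs';

const MAX_REQUESTS_PER_SECOND = 15; // hypothetical rate limit
const ONE_WEEK_IN_MS = 7 * 24 * 60 * 60 * 1000;

// ASSUMPTION: TradingPair has only the fields used by this source.
const pair = {
    fromSymbol: 'ZRX',
    toSymbol: 'WETH',
    latestSavedTime: Date.now() - ONE_WEEK_IN_MS,
} as TradingPair;

(async () => {
    const source = new CryptoCompareOHLCVSource(MAX_REQUESTS_PER_SECOND);
    // Split the catch-up window into page-sized intervals, then fetch each one.
    for (const interval of source.generateBackfillIntervals(pair)) {
        const records = await source.getHourlyOHLCVAsync(interval);
        console.log(`${interval.fromSymbol}/${interval.toSymbol}: ${records.length} new hourly records`);
    }
})().catch(err => console.error(err));
```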
92
packages/pipeline/src/data_sources/paradex/index.ts
Normal file
@ -0,0 +1,92 @@
|
|||||||
|
import { fetchAsync, logUtils } from '@0x/utils';
|
||||||
|
|
||||||
|
const PARADEX_BASE_URL = 'https://api.paradex.io/consumer/v0';
|
||||||
|
const ACTIVE_MARKETS_URL = `${PARADEX_BASE_URL}/markets`;
|
||||||
|
const ORDERBOOK_ENDPOINT = `${PARADEX_BASE_URL}/orderbook`;
|
||||||
|
const TOKEN_INFO_ENDPOINT = `${PARADEX_BASE_URL}/tokens`;
|
||||||
|
export const PARADEX_SOURCE = 'paradex';
|
||||||
|
|
||||||
|
export type ParadexActiveMarketsResponse = ParadexMarket[];
|
||||||
|
|
||||||
|
export interface ParadexMarket {
|
||||||
|
id: string;
|
||||||
|
symbol: string;
|
||||||
|
baseToken: string;
|
||||||
|
quoteToken: string;
|
||||||
|
minOrderSize: string;
|
||||||
|
maxOrderSize: string;
|
||||||
|
priceMaxDecimals: number;
|
||||||
|
amountMaxDecimals: number;
|
||||||
|
// These are not native to the Paradex API response. We tag them on later
|
||||||
|
// by calling the token endpoint and joining on symbol.
|
||||||
|
baseTokenAddress?: string;
|
||||||
|
quoteTokenAddress?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ParadexOrderbookResponse {
|
||||||
|
marketId: number;
|
||||||
|
marketSymbol: string;
|
||||||
|
bids: ParadexOrder[];
|
||||||
|
asks: ParadexOrder[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ParadexOrder {
|
||||||
|
amount: string;
|
||||||
|
price: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ParadexTokenInfoResponse = ParadexTokenInfo[];
|
||||||
|
|
||||||
|
export interface ParadexTokenInfo {
|
||||||
|
name: string;
|
||||||
|
symbol: string;
|
||||||
|
address: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class ParadexSource {
|
||||||
|
private readonly _apiKey: string;
|
||||||
|
|
||||||
|
constructor(apiKey: string) {
|
||||||
|
this._apiKey = apiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Call Paradex API to find out which markets they are maintaining orderbooks for.
|
||||||
|
*/
|
||||||
|
public async getActiveMarketsAsync(): Promise<ParadexActiveMarketsResponse> {
|
||||||
|
logUtils.log('Getting all active Paradex markets.');
|
||||||
|
const resp = await fetchAsync(ACTIVE_MARKETS_URL, {
|
||||||
|
headers: { 'API-KEY': this._apiKey },
|
||||||
|
});
|
||||||
|
const markets: ParadexActiveMarketsResponse = await resp.json();
|
||||||
|
logUtils.log(`Got ${markets.length} markets.`);
|
||||||
|
return markets;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Call Paradex API to find out their token information.
|
||||||
|
*/
|
||||||
|
public async getTokenInfoAsync(): Promise<ParadexTokenInfoResponse> {
|
||||||
|
logUtils.log('Getting token information from Paradex.');
|
||||||
|
const resp = await fetchAsync(TOKEN_INFO_ENDPOINT, {
|
||||||
|
headers: { 'API-KEY': this._apiKey },
|
||||||
|
});
|
||||||
|
const tokens: ParadexTokenInfoResponse = await resp.json();
|
||||||
|
logUtils.log(`Got information for ${tokens.length} tokens.`);
|
||||||
|
return tokens;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieve orderbook from Paradex API for a given market.
|
||||||
|
* @param marketSymbol String representing the market we want data for.
|
||||||
|
*/
|
||||||
|
public async getMarketOrderbookAsync(marketSymbol: string): Promise<ParadexOrderbookResponse> {
|
||||||
|
logUtils.log(`${marketSymbol}: Retrieving orderbook.`);
|
||||||
|
const marketOrderbookUrl = `${ORDERBOOK_ENDPOINT}?market=${marketSymbol}`;
|
||||||
|
const resp = await fetchAsync(marketOrderbookUrl, {
|
||||||
|
headers: { 'API-KEY': this._apiKey },
|
||||||
|
});
|
||||||
|
const orderbookResponse: ParadexOrderbookResponse = await resp.json();
|
||||||
|
return orderbookResponse;
|
||||||
|
}
|
||||||
|
}
|
33
packages/pipeline/src/data_sources/relayer-registry/index.ts
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
import axios from 'axios';
|
||||||
|
|
||||||
|
export interface RelayerResponse {
|
||||||
|
name: string;
|
||||||
|
homepage_url: string;
|
||||||
|
app_url: string;
|
||||||
|
header_img: string;
|
||||||
|
logo_img: string;
|
||||||
|
networks: RelayerResponseNetwork[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface RelayerResponseNetwork {
|
||||||
|
networkId: number;
|
||||||
|
sra_http_endpoint?: string;
|
||||||
|
sra_ws_endpoint?: string;
|
||||||
|
static_order_fields?: {
|
||||||
|
fee_recipient_addresses?: string[];
|
||||||
|
taker_addresses?: string[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export class RelayerRegistrySource {
|
||||||
|
private readonly _url: string;
|
||||||
|
|
||||||
|
constructor(url: string) {
|
||||||
|
this._url = url;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getRelayerInfoAsync(): Promise<Map<string, RelayerResponse>> {
|
||||||
|
const resp = await axios.get<Map<string, RelayerResponse>>(this._url);
|
||||||
|
return resp.data;
|
||||||
|
}
|
||||||
|
}
|
29
packages/pipeline/src/data_sources/trusted_tokens/index.ts
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
import axios from 'axios';
|
||||||
|
|
||||||
|
export interface ZeroExTrustedTokenMeta {
|
||||||
|
address: string;
|
||||||
|
name: string;
|
||||||
|
symbol: string;
|
||||||
|
decimals: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MetamaskTrustedTokenMeta {
|
||||||
|
address: string;
|
||||||
|
name: string;
|
||||||
|
erc20: boolean;
|
||||||
|
symbol: string;
|
||||||
|
decimals: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class TrustedTokenSource<T> {
|
||||||
|
private readonly _url: string;
|
||||||
|
|
||||||
|
constructor(url: string) {
|
||||||
|
this._url = url;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getTrustedTokenMetaAsync(): Promise<T> {
|
||||||
|
const resp = await axios.get<T>(this._url);
|
||||||
|
return resp.data;
|
||||||
|
}
|
||||||
|
}
|
22
packages/pipeline/src/data_sources/web3/index.ts
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
import { Web3ProviderEngine } from '@0x/subproviders';
|
||||||
|
import { Web3Wrapper } from '@0x/web3-wrapper';
|
||||||
|
import { BlockWithoutTransactionData, Transaction } from 'ethereum-types';
|
||||||
|
|
||||||
|
export class Web3Source {
|
||||||
|
private readonly _web3Wrapper: Web3Wrapper;
|
||||||
|
constructor(provider: Web3ProviderEngine) {
|
||||||
|
this._web3Wrapper = new Web3Wrapper(provider);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getBlockInfoAsync(blockNumber: number): Promise<BlockWithoutTransactionData> {
|
||||||
|
const block = await this._web3Wrapper.getBlockIfExistsAsync(blockNumber);
|
||||||
|
if (block == null) {
|
||||||
|
return Promise.reject(new Error(`Could not find block for given block number: ${blockNumber}`));
|
||||||
|
}
|
||||||
|
return block;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getTransactionInfoAsync(txHash: string): Promise<Transaction> {
|
||||||
|
return this._web3Wrapper.getTransactionByHashAsync(txHash);
|
||||||
|
}
|
||||||
|
}
|
13
packages/pipeline/src/entities/block.ts
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
import { Column, Entity, PrimaryColumn } from 'typeorm';
|
||||||
|
|
||||||
|
import { numberToBigIntTransformer } from '../utils';
|
||||||
|
|
||||||
|
@Entity({ name: 'blocks', schema: 'raw' })
|
||||||
|
export class Block {
|
||||||
|
@PrimaryColumn() public hash!: string;
|
||||||
|
@PrimaryColumn({ transformer: numberToBigIntTransformer })
|
||||||
|
public number!: number;
|
||||||
|
|
||||||
|
@Column({ name: 'timestamp', transformer: numberToBigIntTransformer })
|
||||||
|
public timestamp!: number;
|
||||||
|
}
|
54
packages/pipeline/src/entities/dex_trade.ts
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
import { BigNumber } from '@0x/utils';
|
||||||
|
import { Column, Entity, PrimaryColumn } from 'typeorm';
|
||||||
|
|
||||||
|
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';
|
||||||
|
|
||||||
|
@Entity({ name: 'dex_trades', schema: 'raw' })
|
||||||
|
export class DexTrade {
|
||||||
|
@PrimaryColumn({ name: 'source_url' })
|
||||||
|
public sourceUrl!: string;
|
||||||
|
@PrimaryColumn({ name: 'tx_hash' })
|
||||||
|
public txHash!: string;
|
||||||
|
|
||||||
|
@Column({ name: 'tx_timestamp', type: 'bigint', transformer: numberToBigIntTransformer })
|
||||||
|
public txTimestamp!: number;
|
||||||
|
@Column({ name: 'tx_date' })
|
||||||
|
public txDate!: string;
|
||||||
|
@Column({ name: 'tx_sender' })
|
||||||
|
public txSender!: string;
|
||||||
|
@Column({ name: 'smart_contract_id', type: 'bigint', transformer: numberToBigIntTransformer })
|
||||||
|
public smartContractId!: number;
|
||||||
|
@Column({ name: 'smart_contract_address' })
|
||||||
|
public smartContractAddress!: string;
|
||||||
|
@Column({ name: 'contract_type' })
|
||||||
|
public contractType!: string;
|
||||||
|
@Column({ type: 'varchar' })
|
||||||
|
public maker!: string;
|
||||||
|
@Column({ type: 'varchar' })
|
||||||
|
public taker!: string;
|
||||||
|
@Column({ name: 'amount_buy', type: 'numeric', transformer: bigNumberTransformer })
|
||||||
|
public amountBuy!: BigNumber;
|
||||||
|
@Column({ name: 'maker_fee_amount', type: 'numeric', transformer: bigNumberTransformer })
|
||||||
|
public makerFeeAmount!: BigNumber;
|
||||||
|
@Column({ name: 'buy_currency_id', type: 'bigint', transformer: numberToBigIntTransformer })
|
||||||
|
public buyCurrencyId!: number;
|
||||||
|
@Column({ name: 'buy_symbol' })
|
||||||
|
public buySymbol!: string;
|
||||||
|
@Column({ name: 'amount_sell', type: 'numeric', transformer: bigNumberTransformer })
|
||||||
|
public amountSell!: BigNumber;
|
||||||
|
@Column({ name: 'taker_fee_amount', type: 'numeric', transformer: bigNumberTransformer })
|
||||||
|
public takerFeeAmount!: BigNumber;
|
||||||
|
@Column({ name: 'sell_currency_id', type: 'bigint', transformer: numberToBigIntTransformer })
|
||||||
|
public sellCurrencyId!: number;
|
||||||
|
@Column({ name: 'sell_symbol' })
|
||||||
|
public sellSymbol!: string;
|
||||||
|
@Column({ name: 'maker_annotation' })
|
||||||
|
public makerAnnotation!: string;
|
||||||
|
@Column({ name: 'taker_annotation' })
|
||||||
|
public takerAnnotation!: string;
|
||||||
|
@Column() public protocol!: string;
|
||||||
|
@Column({ name: 'buy_address', type: 'varchar', nullable: true })
|
||||||
|
public buyAddress!: string | null;
|
||||||
|
@Column({ name: 'sell_address', type: 'varchar', nullable: true })
|
||||||
|
public sellAddress!: string | null;
|
||||||
|
}
|
51
packages/pipeline/src/entities/exchange_cancel_event.ts
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
import { Column, Entity, PrimaryColumn } from 'typeorm';
|
||||||
|
|
||||||
|
import { AssetType } from '../types';
|
||||||
|
import { numberToBigIntTransformer } from '../utils';
|
||||||
|
|
||||||
|
@Entity({ name: 'exchange_cancel_events', schema: 'raw' })
|
||||||
|
export class ExchangeCancelEvent {
|
||||||
|
@PrimaryColumn({ name: 'contract_address' })
|
||||||
|
public contractAddress!: string;
|
||||||
|
@PrimaryColumn({ name: 'log_index' })
|
||||||
|
public logIndex!: number;
|
||||||
|
@PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
|
||||||
|
public blockNumber!: number;
|
||||||
|
|
||||||
|
@Column({ name: 'raw_data' })
|
||||||
|
public rawData!: string;
|
||||||
|
|
||||||
|
@Column({ name: 'transaction_hash' })
|
||||||
|
public transactionHash!: string;
|
||||||
|
@Column({ name: 'maker_address' })
|
||||||
|
public makerAddress!: string;
|
||||||
|
@Column({ nullable: true, type: String, name: 'taker_address' })
|
||||||
|
public takerAddress!: string;
|
||||||
|
@Column({ name: 'fee_recipient_address' })
|
||||||
|
public feeRecipientAddress!: string;
|
||||||
|
@Column({ name: 'sender_address' })
|
||||||
|
public senderAddress!: string;
|
||||||
|
@Column({ name: 'order_hash' })
|
||||||
|
public orderHash!: string;
|
||||||
|
|
||||||
|
@Column({ name: 'raw_maker_asset_data' })
|
||||||
|
public rawMakerAssetData!: string;
|
||||||
|
@Column({ name: 'maker_asset_type' })
|
||||||
|
public makerAssetType!: AssetType;
|
||||||
|
@Column({ name: 'maker_asset_proxy_id' })
|
||||||
|
public makerAssetProxyId!: string;
|
||||||
|
@Column({ name: 'maker_token_address' })
|
||||||
|
public makerTokenAddress!: string;
|
||||||
|
@Column({ nullable: true, type: String, name: 'maker_token_id' })
|
||||||
|
public makerTokenId!: string | null;
|
||||||
|
@Column({ name: 'raw_taker_asset_data' })
|
||||||
|
public rawTakerAssetData!: string;
|
||||||
|
@Column({ name: 'taker_asset_type' })
|
||||||
|
public takerAssetType!: AssetType;
|
||||||
|
@Column({ name: 'taker_asset_proxy_id' })
|
||||||
|
public takerAssetProxyId!: string;
|
||||||
|
@Column({ name: 'taker_token_address' })
|
||||||
|
public takerTokenAddress!: string;
|
||||||
|
@Column({ nullable: true, type: String, name: 'taker_token_id' })
|
||||||
|
public takerTokenId!: string | null;
|
||||||
|
}
|
@ -0,0 +1,26 @@
import { BigNumber } from '@0x/utils';
import { Column, Entity, PrimaryColumn } from 'typeorm';

import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';

@Entity({ name: 'exchange_cancel_up_to_events', schema: 'raw' })
export class ExchangeCancelUpToEvent {
    @PrimaryColumn({ name: 'contract_address' })
    public contractAddress!: string;
    @PrimaryColumn({ name: 'log_index' })
    public logIndex!: number;
    @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
    public blockNumber!: number;

    @Column({ name: 'raw_data' })
    public rawData!: string;

    @Column({ name: 'transaction_hash' })
    public transactionHash!: string;
    @Column({ name: 'maker_address' })
    public makerAddress!: string;
    @Column({ name: 'sender_address' })
    public senderAddress!: string;
    @Column({ name: 'order_epoch', type: 'numeric', transformer: bigNumberTransformer })
    public orderEpoch!: BigNumber;
}
60
packages/pipeline/src/entities/exchange_fill_event.ts
Normal file
@ -0,0 +1,60 @@
import { BigNumber } from '@0x/utils';
import { Column, Entity, PrimaryColumn } from 'typeorm';

import { AssetType } from '../types';
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';

@Entity({ name: 'exchange_fill_events', schema: 'raw' })
export class ExchangeFillEvent {
    @PrimaryColumn({ name: 'contract_address' })
    public contractAddress!: string;
    @PrimaryColumn({ name: 'log_index' })
    public logIndex!: number;
    @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
    public blockNumber!: number;

    @Column({ name: 'raw_data' })
    public rawData!: string;

    @Column({ name: 'transaction_hash' })
    public transactionHash!: string;
    @Column({ name: 'maker_address' })
    public makerAddress!: string;
    @Column({ name: 'taker_address' })
    public takerAddress!: string;
    @Column({ name: 'fee_recipient_address' })
    public feeRecipientAddress!: string;
    @Column({ name: 'sender_address' })
    public senderAddress!: string;
    @Column({ name: 'maker_asset_filled_amount', type: 'numeric', transformer: bigNumberTransformer })
    public makerAssetFilledAmount!: BigNumber;
    @Column({ name: 'taker_asset_filled_amount', type: 'numeric', transformer: bigNumberTransformer })
    public takerAssetFilledAmount!: BigNumber;
    @Column({ name: 'maker_fee_paid', type: 'numeric', transformer: bigNumberTransformer })
    public makerFeePaid!: BigNumber;
    @Column({ name: 'taker_fee_paid', type: 'numeric', transformer: bigNumberTransformer })
    public takerFeePaid!: BigNumber;
    @Column({ name: 'order_hash' })
    public orderHash!: string;

    @Column({ name: 'raw_maker_asset_data' })
    public rawMakerAssetData!: string;
    @Column({ name: 'maker_asset_type' })
    public makerAssetType!: AssetType;
    @Column({ name: 'maker_asset_proxy_id' })
    public makerAssetProxyId!: string;
    @Column({ name: 'maker_token_address' })
    public makerTokenAddress!: string;
    @Column({ nullable: true, type: String, name: 'maker_token_id' })
    public makerTokenId!: string | null;
    @Column({ name: 'raw_taker_asset_data' })
    public rawTakerAssetData!: string;
    @Column({ name: 'taker_asset_type' })
    public takerAssetType!: AssetType;
    @Column({ name: 'taker_asset_proxy_id' })
    public takerAssetProxyId!: string;
    @Column({ name: 'taker_token_address' })
    public takerTokenAddress!: string;
    @Column({ nullable: true, type: String, name: 'taker_token_id' })
    public takerTokenId!: string | null;
}
18
packages/pipeline/src/entities/index.ts
Normal file
@ -0,0 +1,18 @@
import { ExchangeCancelEvent } from './exchange_cancel_event';
import { ExchangeCancelUpToEvent } from './exchange_cancel_up_to_event';
import { ExchangeFillEvent } from './exchange_fill_event';

export { Block } from './block';
export { DexTrade } from './dex_trade';
export { ExchangeCancelEvent } from './exchange_cancel_event';
export { ExchangeCancelUpToEvent } from './exchange_cancel_up_to_event';
export { ExchangeFillEvent } from './exchange_fill_event';
export { OHLCVExternal } from './ohlcv_external';
export { Relayer } from './relayer';
export { SraOrder } from './sra_order';
export { SraOrdersObservedTimeStamp, createObservedTimestampForOrder } from './sra_order_observed_timestamp';
export { TokenMetadata } from './token_metadata';
export { TokenOrderbookSnapshot } from './token_order';
export { Transaction } from './transaction';

export type ExchangeEvent = ExchangeFillEvent | ExchangeCancelEvent | ExchangeCancelUpToEvent;
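
Since ExchangeEvent is a plain union of the three event entity classes, downstream code can tell them apart with instanceof checks. A minimal sketch (hypothetical helper, not part of this diff; import path assumed):

import { ExchangeCancelEvent, ExchangeEvent, ExchangeFillEvent } from './entities';

// Hypothetical helper for illustration: label an ExchangeEvent for logging.
function describeExchangeEvent(event: ExchangeEvent): string {
    if (event instanceof ExchangeFillEvent) {
        return `fill ${event.orderHash} in block ${event.blockNumber}`;
    } else if (event instanceof ExchangeCancelEvent) {
        return `cancel ${event.orderHash} in block ${event.blockNumber}`;
    } else {
        // Narrowed to ExchangeCancelUpToEvent, which carries an orderEpoch instead of an orderHash.
        return `cancelUpTo epoch ${event.orderEpoch.toString()} in block ${event.blockNumber}`;
    }
}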
30
packages/pipeline/src/entities/ohlcv_external.ts
Normal file
@ -0,0 +1,30 @@
import { Column, Entity, PrimaryColumn } from 'typeorm';

import { numberToBigIntTransformer } from '../utils';

@Entity({ name: 'ohlcv_external', schema: 'raw' })
export class OHLCVExternal {
    @PrimaryColumn() public exchange!: string;

    @PrimaryColumn({ name: 'from_symbol', type: 'varchar' })
    public fromSymbol!: string;
    @PrimaryColumn({ name: 'to_symbol', type: 'varchar' })
    public toSymbol!: string;
    @PrimaryColumn({ name: 'start_time', transformer: numberToBigIntTransformer })
    public startTime!: number;
    @PrimaryColumn({ name: 'end_time', transformer: numberToBigIntTransformer })
    public endTime!: number;

    @Column() public open!: number;
    @Column() public close!: number;
    @Column() public low!: number;
    @Column() public high!: number;
    @Column({ name: 'volume_from' })
    public volumeFrom!: number;
    @Column({ name: 'volume_to' })
    public volumeTo!: number;

    @PrimaryColumn() public source!: string;
    @PrimaryColumn({ name: 'observed_timestamp', transformer: numberToBigIntTransformer })
    public observedTimestamp!: number;
}
21
packages/pipeline/src/entities/relayer.ts
Normal file
@ -0,0 +1,21 @@
import { Column, Entity, PrimaryColumn } from 'typeorm';

@Entity({ name: 'relayers', schema: 'raw' })
export class Relayer {
    @PrimaryColumn() public uuid!: string;

    @Column() public name!: string;
    @Column({ name: 'homepage_url', type: 'varchar' })
    public homepageUrl!: string;
    @Column({ name: 'sra_http_endpoint', type: 'varchar', nullable: true })
    public sraHttpEndpoint!: string | null;
    @Column({ name: 'sra_ws_endpoint', type: 'varchar', nullable: true })
    public sraWsEndpoint!: string | null;
    @Column({ name: 'app_url', type: 'varchar', nullable: true })
    public appUrl!: string | null;

    @Column({ name: 'fee_recipient_addresses', type: 'varchar', array: true })
    public feeRecipientAddresses!: string[];
    @Column({ name: 'taker_addresses', type: 'varchar', array: true })
    public takerAddresses!: string[];
}
63
packages/pipeline/src/entities/sra_order.ts
Normal file
@ -0,0 +1,63 @@
import { BigNumber } from '@0x/utils';
import { Column, Entity, PrimaryColumn } from 'typeorm';

import { AssetType } from '../types';
import { bigNumberTransformer } from '../utils';

@Entity({ name: 'sra_orders', schema: 'raw' })
export class SraOrder {
    @PrimaryColumn({ name: 'exchange_address' })
    public exchangeAddress!: string;
    @PrimaryColumn({ name: 'order_hash_hex' })
    public orderHashHex!: string;
    @PrimaryColumn({ name: 'source_url' })
    public sourceUrl!: string;

    @Column({ name: 'maker_address' })
    public makerAddress!: string;
    @Column({ name: 'taker_address' })
    public takerAddress!: string;
    @Column({ name: 'fee_recipient_address' })
    public feeRecipientAddress!: string;
    @Column({ name: 'sender_address' })
    public senderAddress!: string;
    @Column({ name: 'maker_asset_amount', type: 'numeric', transformer: bigNumberTransformer })
    public makerAssetAmount!: BigNumber;
    @Column({ name: 'taker_asset_amount', type: 'numeric', transformer: bigNumberTransformer })
    public takerAssetAmount!: BigNumber;
    @Column({ name: 'maker_fee', type: 'numeric', transformer: bigNumberTransformer })
    public makerFee!: BigNumber;
    @Column({ name: 'taker_fee', type: 'numeric', transformer: bigNumberTransformer })
    public takerFee!: BigNumber;
    @Column({ name: 'expiration_time_seconds', type: 'numeric', transformer: bigNumberTransformer })
    public expirationTimeSeconds!: BigNumber;
    @Column({ name: 'salt', type: 'numeric', transformer: bigNumberTransformer })
    public salt!: BigNumber;
    @Column({ name: 'signature' })
    public signature!: string;

    @Column({ name: 'raw_maker_asset_data' })
    public rawMakerAssetData!: string;
    @Column({ name: 'maker_asset_type' })
    public makerAssetType!: AssetType;
    @Column({ name: 'maker_asset_proxy_id' })
    public makerAssetProxyId!: string;
    @Column({ name: 'maker_token_address' })
    public makerTokenAddress!: string;
    @Column({ nullable: true, type: String, name: 'maker_token_id' })
    public makerTokenId!: string | null;
    @Column({ name: 'raw_taker_asset_data' })
    public rawTakerAssetData!: string;
    @Column({ name: 'taker_asset_type' })
    public takerAssetType!: AssetType;
    @Column({ name: 'taker_asset_proxy_id' })
    public takerAssetProxyId!: string;
    @Column({ name: 'taker_token_address' })
    public takerTokenAddress!: string;
    @Column({ nullable: true, type: String, name: 'taker_token_id' })
    public takerTokenId!: string | null;

    // TODO(albrow): Make this optional?
    @Column({ name: 'metadata_json' })
    public metadataJson!: string;
}
@ -0,0 +1,35 @@
import { Entity, PrimaryColumn } from 'typeorm';

import { numberToBigIntTransformer } from '../utils';

import { SraOrder } from './sra_order';

@Entity({ name: 'sra_orders_observed_timestamps', schema: 'raw' })
export class SraOrdersObservedTimeStamp {
    @PrimaryColumn({ name: 'exchange_address' })
    public exchangeAddress!: string;
    @PrimaryColumn({ name: 'order_hash_hex' })
    public orderHashHex!: string;
    @PrimaryColumn({ name: 'source_url' })
    public sourceUrl!: string;

    @PrimaryColumn({ name: 'observed_timestamp', transformer: numberToBigIntTransformer })
    public observedTimestamp!: number;
}

/**
 * Returns a new SraOrdersObservedTimeStamp for the given order and observation
 * time.
 * @param order The order to generate a timestamp for.
 * @param observedTimestamp The time at which the order was observed.
 */
export function createObservedTimestampForOrder(
    order: SraOrder,
    observedTimestamp: number,
): SraOrdersObservedTimeStamp {
    const observed = new SraOrdersObservedTimeStamp();
    observed.exchangeAddress = order.exchangeAddress;
    observed.orderHashHex = order.orderHashHex;
    observed.sourceUrl = order.sourceUrl;
    observed.observedTimestamp = observedTimestamp;
    return observed;
}
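
A short usage sketch for the helper above (hypothetical caller, not part of this diff; assumes Date.now() in milliseconds is an acceptable observation time):

import { SraOrder } from './sra_order';
import { createObservedTimestampForOrder, SraOrdersObservedTimeStamp } from './sra_order_observed_timestamp';

// Hypothetical: pair an already-parsed SRA order with the moment it was seen.
function markOrderAsObserved(order: SraOrder): SraOrdersObservedTimeStamp {
    // The result copies the order's composite key (exchangeAddress, orderHashHex, sourceUrl)
    // and is ready to be saved alongside the order itself.
    return createObservedTimestampForOrder(order, Date.now());
}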
22
packages/pipeline/src/entities/token_metadata.ts
Normal file
@ -0,0 +1,22 @@
import { BigNumber } from '@0x/utils';
import { Column, Entity, PrimaryColumn } from 'typeorm';

import { bigNumberTransformer } from '../utils/transformers';

@Entity({ name: 'token_metadata', schema: 'raw' })
export class TokenMetadata {
    @PrimaryColumn({ type: 'varchar', nullable: false })
    public address!: string;

    @PrimaryColumn({ type: 'varchar', nullable: false })
    public authority!: string;

    @Column({ type: 'numeric', transformer: bigNumberTransformer, nullable: true })
    public decimals!: BigNumber | null;

    @Column({ type: 'varchar', nullable: true })
    public symbol!: string | null;

    @Column({ type: 'varchar', nullable: true })
    public name!: string | null;
}
29
packages/pipeline/src/entities/token_order.ts
Normal file
@ -0,0 +1,29 @@
import { BigNumber } from '@0x/utils';
import { Column, Entity, PrimaryColumn } from 'typeorm';

import { OrderType } from '../types';
import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';

@Entity({ name: 'token_orderbook_snapshots', schema: 'raw' })
export class TokenOrderbookSnapshot {
    @PrimaryColumn({ name: 'observed_timestamp', type: 'bigint', transformer: numberToBigIntTransformer })
    public observedTimestamp!: number;
    @PrimaryColumn({ name: 'source' })
    public source!: string;
    @Column({ name: 'order_type' })
    public orderType!: OrderType;
    @PrimaryColumn({ name: 'price', type: 'numeric', transformer: bigNumberTransformer })
    public price!: BigNumber;
    @PrimaryColumn({ name: 'base_asset_symbol' })
    public baseAssetSymbol!: string;
    @Column({ name: 'base_asset_address' })
    public baseAssetAddress!: string;
    @Column({ name: 'base_volume', type: 'numeric', transformer: bigNumberTransformer })
    public baseVolume!: BigNumber;
    @PrimaryColumn({ name: 'quote_asset_symbol' })
    public quoteAssetSymbol!: string;
    @Column({ name: 'quote_asset_address' })
    public quoteAssetAddress!: string;
    @Column({ name: 'quote_volume', type: 'numeric', transformer: bigNumberTransformer })
    public quoteVolume!: BigNumber;
}
19
packages/pipeline/src/entities/transaction.ts
Normal file
@ -0,0 +1,19 @@
import { BigNumber } from '@0x/utils';
import { Column, Entity, PrimaryColumn } from 'typeorm';

import { bigNumberTransformer, numberToBigIntTransformer } from '../utils';

@Entity({ name: 'transactions', schema: 'raw' })
export class Transaction {
    @PrimaryColumn({ name: 'transaction_hash' })
    public transactionHash!: string;
    @PrimaryColumn({ name: 'block_hash' })
    public blockHash!: string;
    @PrimaryColumn({ name: 'block_number', transformer: numberToBigIntTransformer })
    public blockNumber!: number;

    @Column({ type: 'numeric', name: 'gas_used', transformer: bigNumberTransformer })
    public gasUsed!: BigNumber;
    @Column({ type: 'numeric', name: 'gas_price', transformer: bigNumberTransformer })
    public gasPrice!: BigNumber;
}
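
The bigNumberTransformer and numberToBigIntTransformer referenced throughout these entities live in packages/pipeline/src/utils, which is not shown in this diff. As a rough sketch only (the real implementation may differ), they are presumably TypeORM ValueTransformers along these lines:

import { BigNumber } from '@0x/utils';
import { ValueTransformer } from 'typeorm';

// Sketch: map between BigNumber on the entity and NUMERIC (returned as a string) in Postgres.
export const bigNumberTransformer: ValueTransformer = {
    from: (value: string | null): BigNumber | null => (value === null ? null : new BigNumber(value)),
    to: (value: BigNumber | null): string | null => (value === null ? null : value.toString()),
};

// Sketch: map between number on the entity and BIGINT (returned as a string) in Postgres.
export const numberToBigIntTransformer: ValueTransformer = {
    from: (value: string): number => parseInt(value, 10),
    to: (value: number): string => value.toString(),
};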
42
packages/pipeline/src/ormconfig.ts
Normal file
@ -0,0 +1,42 @@
import { ConnectionOptions } from 'typeorm';

import {
    Block,
    DexTrade,
    ExchangeCancelEvent,
    ExchangeCancelUpToEvent,
    ExchangeFillEvent,
    OHLCVExternal,
    Relayer,
    SraOrder,
    SraOrdersObservedTimeStamp,
    TokenMetadata,
    TokenOrderbookSnapshot,
    Transaction,
} from './entities';

const entities = [
    Block,
    DexTrade,
    ExchangeCancelEvent,
    ExchangeCancelUpToEvent,
    ExchangeFillEvent,
    OHLCVExternal,
    Relayer,
    SraOrder,
    SraOrdersObservedTimeStamp,
    TokenMetadata,
    TokenOrderbookSnapshot,
    Transaction,
];

const config: ConnectionOptions = {
    type: 'postgres',
    url: process.env.ZEROEX_DATA_PIPELINE_DB_URL,
    synchronize: false,
    logging: ['error'],
    entities,
    migrations: ['./lib/migrations/**/*.js'],
};

module.exports = config;
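
For illustration, a hypothetical entry point (not part of this diff) could consume this config with TypeORM's createConnection. ZEROEX_DATA_PIPELINE_DB_URL must point at the target Postgres instance, and since synchronize is false the migrations are expected to be run separately:

import 'reflect-metadata';
import { ConnectionOptions, createConnection } from 'typeorm';

import { ExchangeFillEvent } from './entities';
import * as ormConfig from './ormconfig';

// Hypothetical usage sketch: open a connection and persist parsed fill events.
async function saveFillEventsAsync(events: ExchangeFillEvent[]): Promise<void> {
    const connection = await createConnection(ormConfig as ConnectionOptions);
    // The composite primary key (contract_address, log_index, block_number) identifies each event row.
    await connection.getRepository(ExchangeFillEvent).save(events);
    await connection.close();
}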
53
packages/pipeline/src/parsers/bloxy/index.ts
Normal file
@ -0,0 +1,53 @@
import { BigNumber } from '@0x/utils';
import * as R from 'ramda';

import { BLOXY_DEX_TRADES_URL, BloxyTrade } from '../../data_sources/bloxy';
import { DexTrade } from '../../entities';

/**
 * Parses a raw trades response from the Bloxy Dex API and returns an array of
 * DexTrade entities.
 * @param rawTrades A raw trades response from the Bloxy Dex API.
 */
export function parseBloxyTrades(rawTrades: BloxyTrade[]): DexTrade[] {
    return R.map(_parseBloxyTrade, rawTrades);
}

/**
 * Converts a single Bloxy trade into a DexTrade entity.
 * @param rawTrade A single trade from the response from the Bloxy API.
 */
export function _parseBloxyTrade(rawTrade: BloxyTrade): DexTrade {
    const dexTrade = new DexTrade();
    dexTrade.sourceUrl = BLOXY_DEX_TRADES_URL;
    dexTrade.txHash = rawTrade.tx_hash;
    dexTrade.txTimestamp = new Date(rawTrade.tx_time).getTime();
    dexTrade.txDate = rawTrade.tx_date;
    dexTrade.txSender = rawTrade.tx_sender;
    dexTrade.smartContractId = rawTrade.smart_contract_id;
    dexTrade.smartContractAddress = rawTrade.smart_contract_address;
    dexTrade.contractType = rawTrade.contract_type;
    dexTrade.maker = rawTrade.maker;
    dexTrade.taker = rawTrade.taker;
    // TODO(albrow): The Bloxy API returns amounts and fees as a `number` type
    // but some of their values have too many significant digits to be
    // represented that way. Ideally they will switch to using strings and then
    // we can update this code.
    dexTrade.amountBuy = new BigNumber(rawTrade.amountBuy.toString());
    dexTrade.makerFeeAmount = new BigNumber(rawTrade.makerFee.toString());
    dexTrade.buyCurrencyId = rawTrade.buyCurrencyId;
    dexTrade.buySymbol = filterNullCharacters(rawTrade.buySymbol);
    dexTrade.amountSell = new BigNumber(rawTrade.amountSell.toString());
    dexTrade.takerFeeAmount = new BigNumber(rawTrade.takerFee.toString());
    dexTrade.sellCurrencyId = rawTrade.sellCurrencyId;
    dexTrade.sellSymbol = filterNullCharacters(rawTrade.sellSymbol);
    dexTrade.makerAnnotation = rawTrade.maker_annotation;
    dexTrade.takerAnnotation = rawTrade.taker_annotation;
    dexTrade.protocol = rawTrade.protocol;
    dexTrade.buyAddress = rawTrade.buyAddress;
    dexTrade.sellAddress = rawTrade.sellAddress;
    return dexTrade;
}

// Works with any form of escaped null character (e.g., '\0' and '\u0000').
const filterNullCharacters = R.replace(/\0/g, '');
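
For illustration (made-up symbol values): the null-character filter above strips embedded NUL bytes from token symbols before they reach Postgres, which cannot store NUL in text columns.

filterNullCharacters('WETH\u0000'); // => 'WETH'
filterNullCharacters('DAI\0\0');    // => 'DAI'
filterNullCharacters('ZRX');        // => 'ZRX' (unchanged)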
77
packages/pipeline/src/parsers/ddex_orders/index.ts
Normal file
@ -0,0 +1,77 @@
import { BigNumber } from '@0x/utils';
import * as R from 'ramda';

import { DdexMarket, DdexOrder, DdexOrderbook } from '../../data_sources/ddex';
import { TokenOrderbookSnapshot as TokenOrder } from '../../entities';
import { OrderType } from '../../types';

/**
 * Main function of this file.
 * 1) Takes in orders from a Ddex orderbook,
 * 2) aggregates them by price point,
 * 3) parses them into TokenOrder entities with other information attached.
 * @param ddexOrderbook A raw orderbook that we pull from the Ddex API.
 * @param ddexMarket An object containing market data also directly from the API.
 * @param observedTimestamp Time at which the orders for the market were pulled.
 * @param source The exchange where these orders are placed. In this case 'ddex'.
 */
export function parseDdexOrders(
    ddexOrderbook: DdexOrderbook,
    ddexMarket: DdexMarket,
    observedTimestamp: number,
    source: string,
): TokenOrder[] {
    const aggregatedBids = aggregateOrders(ddexOrderbook.bids);
    const aggregatedAsks = aggregateOrders(ddexOrderbook.asks);
    const parsedBids = aggregatedBids.map(order => parseDdexOrder(ddexMarket, observedTimestamp, 'bid', source, order));
    const parsedAsks = aggregatedAsks.map(order => parseDdexOrder(ddexMarket, observedTimestamp, 'ask', source, order));
    return parsedBids.concat(parsedAsks);
}

/**
 * Aggregates orders by price point for consistency with other exchanges.
 * Querying the Ddex API at level 3 setting returns a breakdown of
 * individual orders at each price point. Other exchanges only give total amount
 * at each price point. Returns an array of <price, amount> tuples.
 * @param ddexOrders A list of Ddex orders awaiting aggregation.
 */
export function aggregateOrders(ddexOrders: DdexOrder[]): Array<[string, BigNumber]> {
    const sumAmount = (acc: BigNumber, order: DdexOrder): BigNumber => acc.plus(order.amount);
    const aggregatedPricePoints = R.reduceBy(sumAmount, new BigNumber(0), R.prop('price'), ddexOrders);
    return Object.entries(aggregatedPricePoints);
}

/**
 * Parse a single aggregated Ddex order in order to form a tokenOrder entity
 * which can be saved into the database.
 * @param ddexMarket An object containing information about the market where these
 * trades have been placed.
 * @param observedTimestamp The time when the API response returned back to us.
 * @param orderType 'bid' or 'ask' enum.
 * @param source Exchange where these orders were placed.
 * @param ddexOrder A <price, amount> tuple which we will convert to volume-basis.
 */
export function parseDdexOrder(
    ddexMarket: DdexMarket,
    observedTimestamp: number,
    orderType: OrderType,
    source: string,
    ddexOrder: [string, BigNumber],
): TokenOrder {
    const tokenOrder = new TokenOrder();
    const price = new BigNumber(ddexOrder[0]);
    const amount = ddexOrder[1];

    tokenOrder.source = source;
    tokenOrder.observedTimestamp = observedTimestamp;
    tokenOrder.orderType = orderType;
    tokenOrder.price = price;

    tokenOrder.baseAssetSymbol = ddexMarket.baseToken;
    tokenOrder.baseAssetAddress = ddexMarket.baseTokenAddress;
    tokenOrder.baseVolume = price.times(amount);

    tokenOrder.quoteAssetSymbol = ddexMarket.quoteToken;
    tokenOrder.quoteAssetAddress = ddexMarket.quoteTokenAddress;
    tokenOrder.quoteVolume = amount;
    return tokenOrder;
}
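
A worked example of the aggregation step, using hypothetical level-3 orders (only the two fields the aggregator reads are shown; amounts are decimal strings, which BigNumber.plus accepts):

import { aggregateOrders } from './parsers/ddex_orders'; // hypothetical caller location

const ddexOrders = [
    { price: '0.5', amount: '10' },
    { price: '0.5', amount: '5' },
    { price: '0.6', amount: '1' },
];
const aggregated = aggregateOrders(ddexOrders as any);
// The two orders at price 0.5 collapse into a single price point:
// => [ ['0.5', new BigNumber(15)], ['0.6', new BigNumber(1)] ]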
133
packages/pipeline/src/parsers/events/index.ts
Normal file
@ -0,0 +1,133 @@
import { ExchangeCancelEventArgs, ExchangeCancelUpToEventArgs, ExchangeFillEventArgs } from '@0x/contract-wrappers';
import { assetDataUtils } from '@0x/order-utils';
import { AssetProxyId, ERC721AssetData } from '@0x/types';
import { LogWithDecodedArgs } from 'ethereum-types';
import * as R from 'ramda';

import { ExchangeCancelEvent, ExchangeCancelUpToEvent, ExchangeFillEvent } from '../../entities';
import { bigNumbertoStringOrNull } from '../../utils';

/**
 * Parses raw event logs for a fill event and returns an array of
 * ExchangeFillEvent entities.
 * @param eventLogs Raw event logs (e.g. returned from contract-wrappers).
 */
export const parseExchangeFillEvents: (
    eventLogs: Array<LogWithDecodedArgs<ExchangeFillEventArgs>>,
) => ExchangeFillEvent[] = R.map(_convertToExchangeFillEvent);

/**
 * Parses raw event logs for a cancel event and returns an array of
 * ExchangeCancelEvent entities.
 * @param eventLogs Raw event logs (e.g. returned from contract-wrappers).
 */
export const parseExchangeCancelEvents: (
    eventLogs: Array<LogWithDecodedArgs<ExchangeCancelEventArgs>>,
) => ExchangeCancelEvent[] = R.map(_convertToExchangeCancelEvent);

/**
 * Parses raw event logs for a CancelUpTo event and returns an array of
 * ExchangeCancelUpToEvent entities.
 * @param eventLogs Raw event logs (e.g. returned from contract-wrappers).
 */
export const parseExchangeCancelUpToEvents: (
    eventLogs: Array<LogWithDecodedArgs<ExchangeCancelUpToEventArgs>>,
) => ExchangeCancelUpToEvent[] = R.map(_convertToExchangeCancelUpToEvent);

/**
 * Converts a raw event log for a fill event into an ExchangeFillEvent entity.
 * @param eventLog Raw event log (e.g. returned from contract-wrappers).
 */
export function _convertToExchangeFillEvent(eventLog: LogWithDecodedArgs<ExchangeFillEventArgs>): ExchangeFillEvent {
    const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.makerAssetData);
    const makerAssetType = makerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721';
    const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.takerAssetData);
    const takerAssetType = takerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721';
    const exchangeFillEvent = new ExchangeFillEvent();
    exchangeFillEvent.contractAddress = eventLog.address as string;
    exchangeFillEvent.blockNumber = eventLog.blockNumber as number;
    exchangeFillEvent.logIndex = eventLog.logIndex as number;
    exchangeFillEvent.rawData = eventLog.data as string;
    exchangeFillEvent.transactionHash = eventLog.transactionHash;
    exchangeFillEvent.makerAddress = eventLog.args.makerAddress;
    exchangeFillEvent.takerAddress = eventLog.args.takerAddress;
    exchangeFillEvent.feeRecipientAddress = eventLog.args.feeRecipientAddress;
    exchangeFillEvent.senderAddress = eventLog.args.senderAddress;
    exchangeFillEvent.makerAssetFilledAmount = eventLog.args.makerAssetFilledAmount;
    exchangeFillEvent.takerAssetFilledAmount = eventLog.args.takerAssetFilledAmount;
    exchangeFillEvent.makerFeePaid = eventLog.args.makerFeePaid;
    exchangeFillEvent.takerFeePaid = eventLog.args.takerFeePaid;
    exchangeFillEvent.orderHash = eventLog.args.orderHash;
    exchangeFillEvent.rawMakerAssetData = eventLog.args.makerAssetData;
    exchangeFillEvent.makerAssetType = makerAssetType;
    exchangeFillEvent.makerAssetProxyId = makerAssetData.assetProxyId;
    exchangeFillEvent.makerTokenAddress = makerAssetData.tokenAddress;
    // tslint has a false positive here. Type assertion is required.
    // tslint:disable-next-line:no-unnecessary-type-assertion
    exchangeFillEvent.makerTokenId = bigNumbertoStringOrNull((makerAssetData as ERC721AssetData).tokenId);
    exchangeFillEvent.rawTakerAssetData = eventLog.args.takerAssetData;
    exchangeFillEvent.takerAssetType = takerAssetType;
    exchangeFillEvent.takerAssetProxyId = takerAssetData.assetProxyId;
    exchangeFillEvent.takerTokenAddress = takerAssetData.tokenAddress;
    // tslint:disable-next-line:no-unnecessary-type-assertion
    exchangeFillEvent.takerTokenId = bigNumbertoStringOrNull((takerAssetData as ERC721AssetData).tokenId);
    return exchangeFillEvent;
}

/**
 * Converts a raw event log for a cancel event into an ExchangeCancelEvent
 * entity.
 * @param eventLog Raw event log (e.g. returned from contract-wrappers).
 */
export function _convertToExchangeCancelEvent(
    eventLog: LogWithDecodedArgs<ExchangeCancelEventArgs>,
): ExchangeCancelEvent {
    const makerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.makerAssetData);
    const makerAssetType = makerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721';
    const takerAssetData = assetDataUtils.decodeAssetDataOrThrow(eventLog.args.takerAssetData);
    const takerAssetType = takerAssetData.assetProxyId === AssetProxyId.ERC20 ? 'erc20' : 'erc721';
    const exchangeCancelEvent = new ExchangeCancelEvent();
    exchangeCancelEvent.contractAddress = eventLog.address as string;
    exchangeCancelEvent.blockNumber = eventLog.blockNumber as number;
    exchangeCancelEvent.logIndex = eventLog.logIndex as number;
    exchangeCancelEvent.rawData = eventLog.data as string;
    exchangeCancelEvent.transactionHash = eventLog.transactionHash;
    exchangeCancelEvent.makerAddress = eventLog.args.makerAddress;
    exchangeCancelEvent.takerAddress = eventLog.args.takerAddress;
    exchangeCancelEvent.feeRecipientAddress = eventLog.args.feeRecipientAddress;
    exchangeCancelEvent.senderAddress = eventLog.args.senderAddress;
    exchangeCancelEvent.orderHash = eventLog.args.orderHash;
    exchangeCancelEvent.rawMakerAssetData = eventLog.args.makerAssetData;
    exchangeCancelEvent.makerAssetType = makerAssetType;
    exchangeCancelEvent.makerAssetProxyId = makerAssetData.assetProxyId;
    exchangeCancelEvent.makerTokenAddress = makerAssetData.tokenAddress;
    // tslint:disable-next-line:no-unnecessary-type-assertion
    exchangeCancelEvent.makerTokenId = bigNumbertoStringOrNull((makerAssetData as ERC721AssetData).tokenId);
    exchangeCancelEvent.rawTakerAssetData = eventLog.args.takerAssetData;
    exchangeCancelEvent.takerAssetType = takerAssetType;
    exchangeCancelEvent.takerAssetProxyId = takerAssetData.assetProxyId;
    exchangeCancelEvent.takerTokenAddress = takerAssetData.tokenAddress;
    // tslint:disable-next-line:no-unnecessary-type-assertion
    exchangeCancelEvent.takerTokenId = bigNumbertoStringOrNull((takerAssetData as ERC721AssetData).tokenId);
    return exchangeCancelEvent;
}

/**
 * Converts a raw event log for a cancelUpTo event into an
 * ExchangeCancelUpToEvent entity.
 * @param eventLog Raw event log (e.g. returned from contract-wrappers).
 */
export function _convertToExchangeCancelUpToEvent(
    eventLog: LogWithDecodedArgs<ExchangeCancelUpToEventArgs>,
): ExchangeCancelUpToEvent {
    const exchangeCancelUpToEvent = new ExchangeCancelUpToEvent();
    exchangeCancelUpToEvent.contractAddress = eventLog.address as string;
    exchangeCancelUpToEvent.blockNumber = eventLog.blockNumber as number;
    exchangeCancelUpToEvent.logIndex = eventLog.logIndex as number;
    exchangeCancelUpToEvent.rawData = eventLog.data as string;
    exchangeCancelUpToEvent.transactionHash = eventLog.transactionHash;
    exchangeCancelUpToEvent.makerAddress = eventLog.args.makerAddress;
    exchangeCancelUpToEvent.senderAddress = eventLog.args.senderAddress;
    exchangeCancelUpToEvent.orderEpoch = eventLog.args.orderEpoch;
    return exchangeCancelUpToEvent;
}
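
The imported bigNumbertoStringOrNull helper comes from packages/pipeline/src/utils and is not shown in this diff. Based on how it is used above (ERC20 asset data has no tokenId, so the column must fall back to null), it plausibly looks like the following sketch:

import { BigNumber } from '@0x/utils';

// Sketch only; the real helper in ../../utils may differ.
export function bigNumbertoStringOrNull(n: BigNumber | undefined): string | null {
    return n === undefined ? null : n.toString();
}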
@ -0,0 +1,38 @@
import { CryptoCompareOHLCVRecord } from '../../data_sources/ohlcv_external/crypto_compare';
import { OHLCVExternal } from '../../entities';

const ONE_SECOND = 1000; // Crypto Compare uses timestamps in seconds instead of milliseconds

export interface OHLCVMetadata {
    exchange: string;
    fromSymbol: string;
    toSymbol: string;
    source: string;
    observedTimestamp: number;
    interval: number;
}
/**
 * Parses OHLCV records from Crypto Compare into an array of OHLCVExternal entities
 * @param rawRecords an array of OHLCV records from Crypto Compare (not the full response)
 */
export function parseRecords(rawRecords: CryptoCompareOHLCVRecord[], metadata: OHLCVMetadata): OHLCVExternal[] {
    return rawRecords.map(rec => {
        const ohlcvRecord = new OHLCVExternal();
        ohlcvRecord.exchange = metadata.exchange;
        ohlcvRecord.fromSymbol = metadata.fromSymbol;
        ohlcvRecord.toSymbol = metadata.toSymbol;
        ohlcvRecord.startTime = rec.time * ONE_SECOND - metadata.interval;
        ohlcvRecord.endTime = rec.time * ONE_SECOND;

        ohlcvRecord.open = rec.open;
        ohlcvRecord.close = rec.close;
        ohlcvRecord.low = rec.low;
        ohlcvRecord.high = rec.high;
        ohlcvRecord.volumeFrom = rec.volumefrom;
        ohlcvRecord.volumeTo = rec.volumeto;

        ohlcvRecord.source = metadata.source;
        ohlcvRecord.observedTimestamp = metadata.observedTimestamp;
        return ohlcvRecord;
    });
}
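
A quick numeric check of the bucket arithmetic above, with hypothetical values: Crypto Compare reports each bucket's close time in seconds, and the parser derives millisecond start/end times from it.

const ONE_SECOND = 1000;
const recTime = 1546300800;   // hypothetical hourly bucket closing at 2019-01-01T00:00:00Z
const intervalMs = 3600000;   // one hour, as passed in via OHLCVMetadata.interval
const endTime = recTime * ONE_SECOND;   // 1546300800000
const startTime = endTime - intervalMs; // 1546297200000, i.e. one hour earlier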
Some files were not shown because too many files have changed in this diff.