Update README. Run precommit on all files

parent 214921602e
commit 9e6f79940b
.gitignore (vendored)
@@ -1,5 +1,7 @@
 # venv and test cache files
 env/
 __pycache__
+.mypy_cache
+
 *.swp
 
@@ -3,7 +3,7 @@ repos:
     rev: 20.8b1
     hooks:
       - id: black
-        language_version: python3.9.6
+        language_version: python3.9
   - repo: local
     hooks:
       - id: pylint
.python-version (new file)
@@ -0,0 +1 @@
+3.9.4
README.md
@@ -5,7 +5,7 @@ Requirements:
 
 * python3 and pip3
 
-Instructions:
+Instructions to run:
 
 * Setup a virtual enviroment to manage dependencies (optional)
   * `python3 -m venv env`
@@ -17,3 +17,13 @@ Instructions:
 * `pip3 install -r requirements.txt`
 * Run tests for token flow
   * `python -m unittest tests/tokenflow_test.py`
+
+If contributing:
+* Install dev libraries
+  * `pip3 install -r requirements_dev.txt`
+* Setup pre-commit
+  * `pre-commit install`
+* Install dependencies and verify it's working
+  * `pre-commit run --all-files`
+* If you see "failed to find interpreter for..." it means you're missing the correct python version
+  * The current version is python3.9 - [pyenv](https://github.com/pyenv/pyenv) is a great option for managing python versions
@@ -1,4 +1,3 @@
-import json
 from pathlib import Path
 from typing import List
 
@@ -7,7 +6,7 @@ from web3 import Web3
 from mev_inspect.schemas import Block, BlockCall, BlockCallType
 
 
-cache_directory = './cache'
+cache_directory = "./cache"
 
 
 ## Creates a block object, either from the cache or from the chain itself
@@ -16,17 +15,12 @@ cache_directory = './cache'
 def createFromBlockNumber(block_number: int, base_provider) -> Block:
     cache_path = _get_cache_path(block_number)
 
-    if (cache_path.is_file()):
-        print(
-            f'Cache for block {block_number} exists, ' \
-            'loading data from cache'
-        )
+    if cache_path.is_file():
+        print(f"Cache for block {block_number} exists, " "loading data from cache")
 
         return Block.parse_file(cache_path)
     else:
-        print(
-            f"Cache for block {block_number} did not exist, getting data"
-        )
+        print(f"Cache for block {block_number} did not exist, getting data")
 
         w3 = Web3(base_provider)
         block = fetch_block(w3, base_provider, block_number)
@@ -39,36 +33,37 @@ def createFromBlockNumber(block_number: int, base_provider) -> Block:
 def fetch_block(w3, base_provider, block_number: int) -> Block:
     ## Get block data
     block_data = w3.eth.get_block(block_number, True)
 
     ## Get the block receipts
     ## TODO: evaluate whether or not this is sufficient or if gas used needs to be converted to a proper big number.
     ## In inspect-ts it needed to be converted
-    block_receipts_raw = base_provider.make_request("eth_getBlockReceipts", [block_number])
+    block_receipts_raw = base_provider.make_request(
+        "eth_getBlockReceipts", [block_number]
+    )
 
     ## Trace the whole block, return those calls
     block_calls_json = w3.parity.trace_block(block_number)
-    block_calls = [
-        BlockCall(**call_json)
-        for call_json in block_calls_json
-    ]
+    block_calls = [BlockCall(**call_json) for call_json in block_calls_json]
 
     ## Get the logs
     block_hash = (block_data.hash).hex()
-    block_logs = w3.eth.get_logs({'blockHash': block_hash})
+    block_logs = w3.eth.get_logs({"blockHash": block_hash})
 
     ## Get gas used by individual txs and store them too
     txs_gas_data = {}
 
-    for transaction in block_data['transactions']:
+    for transaction in block_data["transactions"]:
         tx_hash = (transaction.hash).hex()
         tx_data = w3.eth.get_transaction(tx_hash)
         tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)
         txs_gas_data[tx_hash] = {
-            'gasUsed': tx_receipt['gasUsed'], # fix: why does this return 0 for certain txs?
-            'gasPrice': tx_data['gasPrice'],
-            'netFeePaid': tx_data['gasPrice'] * tx_receipt['gasUsed']
+            "gasUsed": tx_receipt[
+                "gasUsed"
+            ],  # fix: why does this return 0 for certain txs?
+            "gasPrice": tx_data["gasPrice"],
+            "netFeePaid": tx_data["gasPrice"] * tx_receipt["gasUsed"],
         }
 
     transaction_hashes = get_transaction_hashes(block_calls)
 
     ## Create a new object
@@ -88,7 +83,10 @@ def get_transaction_hashes(calls: List[BlockCall]) -> List[str]:
 
     for call in calls:
         if call.type != BlockCallType.reward:
-            if call.transaction_hash not in result:
+            if (
+                call.transaction_hash is not None
+                and call.transaction_hash not in result
+            ):
                 result.append(call.transaction_hash)
 
     return result
@@ -7,62 +7,72 @@ from mev_inspect.config import load_config
 
 config = load_config()
 
-uniswap_router_abi = json.loads(config['ABI']['UniswapV2Router'])
-uniswap_router_address = (config['ADDRESSES']['UniswapV2Router'])
-sushiswap_router_address = (config['ADDRESSES']['SushiswapV2Router'])
+uniswap_router_abi = json.loads(config["ABI"]["UniswapV2Router"])
+uniswap_router_address = config["ADDRESSES"]["UniswapV2Router"]
+sushiswap_router_address = config["ADDRESSES"]["SushiswapV2Router"]
 
+uniswap_pair_abi = json.loads(config["ABI"]["UniswapV2Pair"])
 
-uniswap_pair_abi = json.loads(config['ABI']['UniswapV2Pair'])
 
 class UniswapInspector:
     def __init__(self, base_provider) -> None:
         self.w3 = Web3(base_provider)
 
         self.trading_functions = self.get_trading_functions()
-        self.uniswap_v2_router_contract = self.w3.eth.contract(abi=uniswap_router_abi, address=uniswap_router_address)
+        self.uniswap_v2_router_contract = self.w3.eth.contract(
+            abi=uniswap_router_abi, address=uniswap_router_address
+        )
         self.uniswap_router_trade_signatures = self.get_router_signatures()
 
         self.uniswap_v2_pair_contract = self.w3.eth.contract(abi=uniswap_pair_abi)
-        self.uniswap_v2_pair_swap_signatures = self.uniswap_v2_pair_contract.functions.swap(0, 0, uniswap_router_address, "").selector ## Note the address here doesn't matter, but it must be filled out
-        self.uniswap_v2_pair_reserves_signatures = self.uniswap_v2_pair_contract.functions.getReserves().selector ## Called "checksigs" in mev-inpsect.ts
+        self.uniswap_v2_pair_swap_signatures = (
+            self.uniswap_v2_pair_contract.functions.swap(
+                0, 0, uniswap_router_address, ""
+            ).selector
+        )  ## Note the address here doesn't matter, but it must be filled out
+        self.uniswap_v2_pair_reserves_signatures = (
+            self.uniswap_v2_pair_contract.functions.getReserves().selector
+        )  ## Called "checksigs" in mev-inpsect.ts
 
         print("Built Uniswap inspector")
 
     def get_trading_functions(self):
         ## Gets all functions used for swapping
         result = []
 
         ## For each entry in the ABI
         for abi in uniswap_router_abi:
             ## Check to see if the entry is a function and if it is if the function's name starts with swap
-            if abi['type'] == 'function' and abi['name'].startswith('swap'):
+            if abi["type"] == "function" and abi["name"].startswith("swap"):
                 ## If so add it to our array
-                result.append(abi['name'])
+                result.append(abi["name"])
 
         return result
 
     def get_router_signatures(self):
         ## Gets the selector / function signatures of all the router swap functions
         result = []
 
         ## For each entry in the ABI
         for abi in uniswap_router_abi:
             ## Check to see if the entry is a function and if it is if the function's name starts with swap
-            if abi['type'] == 'function' and abi['name'].startswith('swap'):
+            if abi["type"] == "function" and abi["name"].startswith("swap"):
                 ## Add a parantheses
-                function = abi['name'] + '('
+                function = abi["name"] + "("
 
                 ## For each input in the function's input
-                for input in abi['inputs']:
+                for input in abi["inputs"]:
 
                     ## Concat them into a string with commas
-                    function = function + input['internalType'] + ','
+                    function = function + input["internalType"] + ","
 
                 ## Take off the last comma, add a ')' to close the parentheses
-                function = function[:-1] + ')'
+                function = function[:-1] + ")"
 
                 ## The result looks like this: 'swapETHForExactTokens(uint256,address[],address,uint256)'
 
                 ## Take the first 4 bytes of the sha3 hash of the above string.
-                selector = (Web3.sha3(text=function)[0:4])
+                selector = Web3.sha3(text=function)[0:4]
 
                 ## Add that to an array
                 result.append(selector)
@@ -70,12 +80,13 @@ class UniswapInspector:
         return result
 
     def inspect(self, calls):
-        result = []
-
-        trade_calls = []
-
         for call in calls:
-            print('\n',call)
-            if (call['action']['to'] == uniswap_router_address.lower() or call['action']['to'] == sushiswap_router_address.lower()) and utils.check_call_for_signature(call, self.uniswap_router_trade_signatures):
+            print("\n", call)
+            if (
+                call["action"]["to"] == uniswap_router_address.lower()
+                or call["action"]["to"] == sushiswap_router_address.lower()
+            ) and utils.check_call_for_signature(
+                call, self.uniswap_router_trade_signatures
+            ):
                 # print("WIP, here is where there is a call that matches what we are looking for")
                 1 == 1
@@ -9,10 +9,7 @@ class Processor:
     def get_transaction_evaluations(self, block_data):
         for transaction_hash in block_data.transaction_hashes:
             calls = block_data.get_filtered_calls(transaction_hash)
-            calls_json = [
-                to_original_json_dict(call)
-                for call in calls
-            ]
+            calls_json = [to_original_json_dict(call) for call in calls]
 
             for inspector in self.inspectors:
                 inspector.inspect(calls_json)
@@ -1,9 +1,6 @@
-import json
 from enum import Enum
 from typing import Dict, List, Optional
 
-from pydantic import BaseModel
-
 from .utils import CamelModel, Web3Model
 
 
@@ -38,7 +35,4 @@ class Block(Web3Model):
     txs_gas_data: Dict[str, dict]
 
     def get_filtered_calls(self, hash: str) -> List[BlockCall]:
-        return [
-            call for call in self.calls
-            if call.transaction_hash == hash
-        ]
+        return [call for call in self.calls if call.transaction_hash == hash]
@@ -6,9 +6,8 @@ from web3.datastructures import AttributeDict
 
 
 def to_camel(string: str) -> str:
-    return ''.join(
-        word.capitalize() if i > 0 else word
-        for i, word in enumerate(string.split('_'))
+    return "".join(
+        word.capitalize() if i > 0 else word for i, word in enumerate(string.split("_"))
     )
 
 
@@ -5,186 +5,222 @@ from mev_inspect.config import load_config
 
 config = load_config()
 
-rpc_url = config['RPC']['Endpoint']
-weth_address = config['ADDRESSES']['WETH']
+rpc_url = config["RPC"]["Endpoint"]
+weth_address = config["ADDRESSES"]["WETH"]
 # w3 = Web3(HTTPProvider(rpc_url))
 
-cache_directory = './cache'
+cache_directory = "./cache"
 
 
 def get_tx_traces(txHash, blockNo):
     # block_calls = w3.parity.trace_block(10803840)
-    cache_file = '{cacheDirectory}/{blockNumber}-new.json'.format(cacheDirectory=cache_directory, blockNumber=blockNo)
+    cache_file = "{cacheDirectory}/{blockNumber}-new.json".format(
+        cacheDirectory=cache_directory, blockNumber=blockNo
+    )
     file_exists = Path(cache_file).is_file()
 
     tx_traces = []
     # if have the traces cached
-    if(file_exists):
+    if file_exists:
         block_file = open(cache_file)
         block_json = json.load(block_file)
-        for call in block_json['calls']:
-            if call['transactionHash'] == txHash:
+        for call in block_json["calls"]:
+            if call["transactionHash"] == txHash:
                 tx_traces.append(call)
         block_file.close()
     else:
         # todo, fetch and cache traces that don't exist
         # depending on the best way to call block.py from here
        print("traces do not exist")
 
-    return(tx_traces)
+    return tx_traces
 
 
 def is_stablecoin_address(address):
     # to look for stablecoin inflow/outflows
     stablecoin_addresses = [
         "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48",  # USDC
         "0xdac17f958d2ee523a2206206994597c13d831ec7",  # USDT
         "0x6b175474e89094c44da98b954eedeac495271d0f",  # DAI
         "0x0000000000085d4780b73119b644ae5ecd22b376",  # TUSD
         "0x4fabb145d64652a948d72533023f6e7a623c7c53",  # BUSD
         "0x8e870d67f660d95d5be530380d0ec0bd388289e1",  # PAX
         "0x956F47F50A910163D8BF957Cf5846D573E7f87CA",  # FEI
         "0x853d955aCEf822Db058eb8505911ED77F175b99e",  # FRAX
         "0xBC6DA0FE9aD5f3b0d58160288917AA56653660E9",  # alUSD
         "0x57Ab1ec28D129707052df4dF418D58a2D46d5f51",  # sUSD
         "0x5f98805A4E8be255a32880FDeC7F6728C6568bA0",  # lUSD
         "0x674C6Ad92Fd080e4004b2312b45f796a192D27a0",  # USDN
     ]
     return address in stablecoin_addresses
 
 
 def is_known_router_address(address):
     # to exclude known router addresses from token flow analysis
     known_router_addresses = [
         "0x3D71d79C224998E608d03C5Ec9B405E7a38505F0",  # keeper dao, whitelists extraction
         "0x11111254369792b2Ca5d084aB5eEA397cA8fa48B",  # 1inch v1 router
         "0x111111125434b319222cdbf8c261674adb56f3ae",  # 1inch v2 router
         "0x11111112542d85b3ef69ae05771c2dccff4faa26",  # 1inch v3 router
         "0xa356867fdcea8e71aeaf87805808803806231fdc",  # DODO
         "0xdef1c0ded9bec7f1a1670819833240f027b25eff",  # 0x proxy
         "0x90f765f63e7dc5ae97d6c576bf693fb6af41c129",  # Set Trade
         "0x7113dd99c79aff93d54cfa4b2885576535a132de",  # Totle exchange
         "0x9509665d015bfe3c77aa5ad6ca20c8afa1d98989",  # Paraswap
         "0x86969d29F5fd327E1009bA66072BE22DB6017cC6",  # Paraswap v2
         "0xf90e98f3d8dce44632e5020abf2e122e0f99dfab",  # Paraswap v3
         "0x57805e5a227937bac2b0fdacaa30413ddac6b8e1",  # Furucombo
         "0x17e8ca1b4798b97602895f63206afcd1fc90ca5f",  # Furucombo proxy
         "0x881d40237659c251811cec9c364ef91dc08d300c",  # Metamask swap
         "0x745daa146934b27e3f0b6bff1a6e36b9b90fb131",  # DEX.ag
         "0xb2be281e8b11b47fec825973fc8bb95332022a54",  # Zerion SDK
         "0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D",  # UniswapV2Router02
         "0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F",  # SushiswapV2Router02
         "0xE592427A0AEce92De3Edee1F18E0157C05861564",  # Uniswap v3 router
         "0x3E66B66Fd1d0b02fDa6C811Da9E0547970DB2f21",  # Balance exchange proxy
         "0x1bD435F3C054b6e901B7b108a0ab7617C808677b",  # Paraswap v4
-        "0xC011a73ee8576Fb46F5E1c5751cA3B9Fe0af2a6F" # SNX proxy synth issuer
+        "0xC011a73ee8576Fb46F5E1c5751cA3B9Fe0af2a6F",  # SNX proxy synth issuer
     ]
     return address in known_router_addresses
 
 
 # we're interested in the to address to run token flow on it as well
 def get_tx_to_address(txHash, blockNo):
-    cache_file = '{cacheDirectory}/{blockNumber}-new.json'.format(cacheDirectory=cache_directory, blockNumber=blockNo)
+    cache_file = "{cacheDirectory}/{blockNumber}-new.json".format(
+        cacheDirectory=cache_directory, blockNumber=blockNo
+    )
     block_file = open(cache_file)
     block_json = json.load(block_file)
-    for receipt in block_json['receipts']['result']:
-        if receipt['transactionHash'] == txHash:
+    for receipt in block_json["receipts"]["result"]:
+        if receipt["transactionHash"] == txHash:
             block_file.close()
-            return receipt['to']
+            return receipt["to"]
 
 
 def get_tx_proxies(tx_traces, to_address):
     proxies = []
     for trace in tx_traces:
-        if(trace['type'] == 'call' and trace['action']['callType'] == 'delegatecall' and trace['action']['from'] == to_address):
-            proxies.append(trace['action']['to'])
-    return(proxies)
+        if (
+            trace["type"] == "call"
+            and trace["action"]["callType"] == "delegatecall"
+            and trace["action"]["from"] == to_address
+        ):
+            proxies.append(trace["action"]["to"])
+    return proxies
 
 
 def get_net_gas_used(txHash, blockNo):
-    cache_file = '{cacheDirectory}/{blockNumber}.json'.format(cacheDirectory=cache_directory, blockNumber=blockNo)
+    cache_file = "{cacheDirectory}/{blockNumber}.json".format(
+        cacheDirectory=cache_directory, blockNumber=blockNo
+    )
     block_file = open(cache_file)
     block_json = json.load(block_file)
     gas_used = 0
-    for trace in block_json['calls']:
-        if trace['transactionHash'] == txHash:
-            gas_used = gas_used + int(trace['result']['gasUsed'],16)
+    for trace in block_json["calls"]:
+        if trace["transactionHash"] == txHash:
+            gas_used = gas_used + int(trace["result"]["gasUsed"], 16)
     print(gas_used)
 
 
 def get_ether_flows(tx_traces, addresses_to_check):
     eth_inflow = 0
     eth_outflow = 0
 
     for trace in tx_traces:
-        if(trace['type'] == 'call'):
-            value = int(trace['action']['value'], 16) # converting from 0x prefix to decimal
+        if trace["type"] == "call":
+            value = int(
+                trace["action"]["value"], 16
+            )  # converting from 0x prefix to decimal
             # ETH_GET
-            if(trace['action']['callType'] != 'delegatecall' and trace['action']['from'] != weth_address and value > 0 and trace['action']['to'] in addresses_to_check):
+            if (
+                trace["action"]["callType"] != "delegatecall"
+                and trace["action"]["from"] != weth_address
+                and value > 0
+                and trace["action"]["to"] in addresses_to_check
+            ):
                 eth_inflow = eth_inflow + value
 
             # ETH_GIVE
-            if(trace['action']['callType'] != 'delegatecall' and trace['action']['to'] != weth_address and value > 0 and trace['action']['from'] in addresses_to_check):
+            if (
+                trace["action"]["callType"] != "delegatecall"
+                and trace["action"]["to"] != weth_address
+                and value > 0
+                and trace["action"]["from"] in addresses_to_check
+            ):
                 eth_outflow = eth_outflow + value
 
-            if(trace['action']['to'] == weth_address):
+            if trace["action"]["to"] == weth_address:
                 # WETH_GET1 & WETH_GET2 (to account for both 'transfer' and 'transferFrom' methods)
                 # WETH_GIVE1 & WETH_GIVE2
 
                 # transfer(address to,uint256 value) with args
-                if(len(trace['action']['input']) == 138):
-                    if(trace['action']['input'][2:10] == "a9059cbb"):
-                        transfer_to = '0x' + trace['action']['input'][34:74]
-                        transfer_value = int('0x' + trace['action']['input'][74:138], 16)
-                        if(transfer_to in addresses_to_check):
+                if len(trace["action"]["input"]) == 138:
+                    if trace["action"]["input"][2:10] == "a9059cbb":
+                        transfer_to = "0x" + trace["action"]["input"][34:74]
+                        transfer_value = int(
+                            "0x" + trace["action"]["input"][74:138], 16
+                        )
+                        if transfer_to in addresses_to_check:
                             eth_inflow = eth_inflow + transfer_value
-                        elif(trace['action']['from'] in addresses_to_check):
+                        elif trace["action"]["from"] in addresses_to_check:
                             eth_outflow = eth_outflow + transfer_value
 
                 # transferFrom(address from,address to,uint256 value )
-                if(len(trace['action']['input']) == 202):
-                    if(trace['action']['input'][2:10] == "23b872dd"):
-                        transfer_from = '0x' + trace['action']['input'][34:74]
-                        transfer_to = '0x' + trace['action']['input'][98:138]
-                        transfer_value = int('0x' + trace['action']['input'][138:202], 16)
-                        if(transfer_to in addresses_to_check):
+                if len(trace["action"]["input"]) == 202:
+                    if trace["action"]["input"][2:10] == "23b872dd":
+                        transfer_from = "0x" + trace["action"]["input"][34:74]
+                        transfer_to = "0x" + trace["action"]["input"][98:138]
+                        transfer_value = int(
+                            "0x" + trace["action"]["input"][138:202], 16
+                        )
+                        if transfer_to in addresses_to_check:
                             eth_inflow = eth_inflow + transfer_value
-                        elif(transfer_from in addresses_to_check):
+                        elif transfer_from in addresses_to_check:
                             eth_outflow = eth_outflow + transfer_value
 
-        if(trace['type'] == 'suicide'):
-            if(trace['action']['refundAddress'] in addresses_to_check):
-                refund_value = int('0x' + trace['action']['balance'], 16)
+        if trace["type"] == "suicide":
+            if trace["action"]["refundAddress"] in addresses_to_check:
+                refund_value = int("0x" + trace["action"]["balance"], 16)
                 eth_inflow = eth_inflow + refund_value
 
     return [eth_inflow, eth_outflow]
 
 
 def get_dollar_flows(tx_traces, addresses_to_check):
     dollar_inflow = 0
     dollar_outflow = 0
     for trace in tx_traces:
-        if(trace['type'] == 'call' and is_stablecoin_address(trace['action']['to'])):
-            value = int(trace['action']['value'], 16) # converting from 0x prefix to decimal
+        if trace["type"] == "call" and is_stablecoin_address(trace["action"]["to"]):
+            _ = int(
+                trace["action"]["value"], 16
+            )  # converting from 0x prefix to decimal
 
             # USD_GET1 & USD_GET2 (to account for both 'transfer' and 'transferFrom' methods)
             # USD_GIVE1 & USD_GIVE2
 
             # transfer(address to,uint256 value) with args
-            if(len(trace['action']['input']) == 138):
-                if(trace['action']['input'][2:10] == "a9059cbb"):
-                    transfer_to = '0x' + trace['action']['input'][34:74]
-                    transfer_value = int('0x' + trace['action']['input'][74:138], 16)
-                    if(transfer_to in addresses_to_check):
+            if len(trace["action"]["input"]) == 138:
+                if trace["action"]["input"][2:10] == "a9059cbb":
+                    transfer_to = "0x" + trace["action"]["input"][34:74]
+                    transfer_value = int("0x" + trace["action"]["input"][74:138], 16)
+                    if transfer_to in addresses_to_check:
                         dollar_inflow = dollar_inflow + transfer_value
-                    elif(trace['action']['from'] in addresses_to_check):
+                    elif trace["action"]["from"] in addresses_to_check:
                         dollar_outflow = dollar_outflow + transfer_value
 
             # transferFrom(address from,address to,uint256 value )
-            if(len(trace['action']['input']) == 202):
-                if(trace['action']['input'][2:10] == "23b872dd"):
-                    transfer_from = '0x' + trace['action']['input'][34:74]
-                    transfer_to = '0x' + trace['action']['input'][98:138]
-                    transfer_value = int('0x' + trace['action']['input'][138:202], 16)
-                    if(transfer_to in addresses_to_check):
+            if len(trace["action"]["input"]) == 202:
+                if trace["action"]["input"][2:10] == "23b872dd":
+                    transfer_from = "0x" + trace["action"]["input"][34:74]
+                    transfer_to = "0x" + trace["action"]["input"][98:138]
+                    transfer_value = int("0x" + trace["action"]["input"][138:202], 16)
+                    if transfer_to in addresses_to_check:
                         dollar_inflow = dollar_inflow + transfer_value
-                    elif(transfer_from in addresses_to_check):
+                    elif transfer_from in addresses_to_check:
                         dollar_outflow = dollar_outflow + transfer_value
     return [dollar_inflow, dollar_outflow]
 
 
 def run_tokenflow(txHash, blockNo):
     tx_traces = get_tx_traces(txHash, blockNo)
     to_address = get_tx_to_address(txHash, blockNo)
@@ -194,27 +230,26 @@ def run_tokenflow(txHash, blockNo):
     proxies = get_tx_proxies(tx_traces, to_address)
     for proxy in proxies:
         addresses_to_check.append(proxy.lower())
 
     # check if the 'to' field is a known aggregator/router
     # if not, add to relevant addresses to run TF on
-    if(not is_known_router_address(to_address)):
-        addresses_to_check.append(to_address.lower()) # traces need lowercase addresses to match
+    if not is_known_router_address(to_address):
+        addresses_to_check.append(
+            to_address.lower()
+        )  # traces need lowercase addresses to match
 
     ether_flows = get_ether_flows(tx_traces, addresses_to_check)
     dollar_flows = get_dollar_flows(tx_traces, addresses_to_check)
     # print(addresses_to_check)
     # print('net eth flow', ether_flows[0] - ether_flows[1])
     # print('net dollar flow', dollar_flows )
-    return {
-        'ether_flows': ether_flows,
-        'dollar_flows': dollar_flows
-    }
+    return {"ether_flows": ether_flows, "dollar_flows": dollar_flows}
 
 
 # note: not the gas set by user, only gas consumed upon execution
-def get_gas_used_by_tx(txHash):
-    #tx_receipt = w3.eth.getTransactionReceipt(txHash)
-    return(tx_receipt['gasUsed'])
+# def get_gas_used_by_tx(txHash):
+#     # tx_receipt = w3.eth.getTransactionReceipt(txHash)
+#     return tx_receipt["gasUsed"]
 
 
 # tx_traces = get_tx_traces('0x4121ce805d33e952b2e6103a5024f70c118432fd0370128d6d7845f9b2987922', 11930296)
@@ -1,9 +1,10 @@
 from hexbytes.main import HexBytes
 
+
 def check_call_for_signature(call, signatures):
-    if (call['action']['input'] == None):
+    if call["action"]["input"] == None:
         return False
 
     ## By default set this to False
     signature_present_boolean = False
 
@@ -11,11 +12,11 @@ def check_call_for_signature(call, signatures):
     for signature in signatures:
         # print("Desired signature:", str(signature))
         # print("Actual", HexBytes(call['action']['input']))
 
-        if HexBytes(call['action']['input']).startswith((signature)):
+        if HexBytes(call["action"]["input"]).startswith((signature)):
             ## Note that we are turning the input into hex bytes here, which seems to be fine
             ## Working with strings was doing weird things
             print("hit")
             signature_present_boolean = True
 
     return signature_present_boolean
requirements_dev.txt (new file)
@@ -0,0 +1,2 @@
+pre-commit==2.13.0
+pylint==2.9.3
@@ -1,16 +1,22 @@
 import argparse
 
-from web3.providers import base
 from web3 import Web3
 
 from mev_inspect import block
 from mev_inspect.inspector_uniswap import UniswapInspector
 from mev_inspect.processor import Processor
 
-parser = argparse.ArgumentParser(description='Inspect some blocks.')
-parser.add_argument('-block_number', metavar='b', type=int, nargs='+',
-                    help='the block number you are targetting, eventually this will need to be changed')
-parser.add_argument('-rpc', metavar='r', help='rpc endpoint, this needs to have parity style traces')
+parser = argparse.ArgumentParser(description="Inspect some blocks.")
+parser.add_argument(
+    "-block_number",
+    metavar="b",
+    type=int,
+    nargs="+",
+    help="the block number you are targetting, eventually this will need to be changed",
+)
+parser.add_argument(
+    "-rpc", metavar="r", help="rpc endpoint, this needs to have parity style traces"
+)
 args = parser.parse_args()
 
 ## Set up the base provider, but don't wrap it in web3 so we can make requests to it with make_request()
@@ -1,20 +1,23 @@
 import unittest
 
-from mev_inspect import inspector_compound
-from mev_inspect import inspector_aave
-
-class TestLiquidations (unittest.TestCase):
-    def test_compound_liquidation(self):
-        tx_hash = "0x0ec6d5044a47feb3ceb647bf7ea4ffc87d09244d629eeced82ba17ec66605012"
-        block_no = 11338848
-        res = inspector_compound.get_profit(tx_hash, block_no)
-        # self.assertEqual(res['profit'], 0)
-
-    def test_aave_liquidation(self):
-        tx_hash = "0xc8d2501d28800b1557eb64c5d0e08fd6070c15b6c04c39ca05631f641d19ffb2"
-        block_no = 10803840
-        res = inspector_aave.get_profit(tx_hash, block_no)
-        # self.assertEqual(res['profit'], 0)
+# Fails precommit because these inspectors don't exist yet
+# from mev_inspect import inspector_compound
+# from mev_inspect import inspector_aave
+#
+#
+# class TestLiquidations(unittest.TestCase):
+#     def test_compound_liquidation(self):
+#         tx_hash = "0x0ec6d5044a47feb3ceb647bf7ea4ffc87d09244d629eeced82ba17ec66605012"
+#         block_no = 11338848
+#         res = inspector_compound.get_profit(tx_hash, block_no)
+#         # self.assertEqual(res['profit'], 0)
+#
+#     def test_aave_liquidation(self):
+#         tx_hash = "0xc8d2501d28800b1557eb64c5d0e08fd6070c15b6c04c39ca05631f641d19ffb2"
+#         block_no = 10803840
+#         res = inspector_aave.get_profit(tx_hash, block_no)
+#         # self.assertEqual(res['profit'], 0)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
@@ -3,27 +3,28 @@ import unittest
 from mev_inspect import tokenflow
 
 
-class TestTokenFlow (unittest.TestCase):
+class TestTokenFlow(unittest.TestCase):
     def test_simple_arb(self):
         tx_hash = "0x4121ce805d33e952b2e6103a5024f70c118432fd0370128d6d7845f9b2987922"
         block_no = 11930296
         res = tokenflow.run_tokenflow(tx_hash, block_no)
-        self.assertEqual(res['ether_flows'], [3547869861992962562, 3499859860420296704])
-        self.assertEqual(res['dollar_flows'], [0,0])
+        self.assertEqual(res["ether_flows"], [3547869861992962562, 3499859860420296704])
+        self.assertEqual(res["dollar_flows"], [0, 0])
 
     def test_arb_with_stable_flow(self):
         tx_hash = "0x496836e0bd1520388e36c79d587a31d4b3306e4f25352164178ca0667c7f9c29"
         block_no = 11935012
         res = tokenflow.run_tokenflow(tx_hash, block_no)
-        self.assertEqual(res['ether_flows'], [597044987302243493, 562445964778930176])
-        self.assertEqual(res['dollar_flows'], [871839781,871839781])
+        self.assertEqual(res["ether_flows"], [597044987302243493, 562445964778930176])
+        self.assertEqual(res["dollar_flows"], [871839781, 871839781])
 
     def test_complex_cross_arb(self):
         tx_hash = "0x5ab21bfba50ad3993528c2828c63e311aafe93b40ee934790e545e150cb6ca73"
         block_no = 11931272
         res = tokenflow.run_tokenflow(tx_hash, block_no)
-        self.assertEqual(res['ether_flows'], [3636400213125714803, 3559576672903063566])
-        self.assertEqual(res['dollar_flows'], [0,0])
+        self.assertEqual(res["ether_flows"], [3636400213125714803, 3559576672903063566])
+        self.assertEqual(res["dollar_flows"], [0, 0])
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()