Add logic and writing for sandwiches. Add tests too

This commit is contained in:
Luke Van Seters 2021-12-13 20:06:25 -05:00
parent 5fa7c6b567
commit f0c29e2b2f
9 changed files with 313 additions and 2 deletions

View File

@ -0,0 +1,64 @@
from typing import List
from uuid import uuid4

from mev_inspect.models.sandwiches import SandwichModel
from mev_inspect.schemas.sandwiches import Sandwich


def delete_sandwiches_for_block(
    db_session,
    block_number: int,
) -> None:
    (
        db_session.query(SandwichModel)
        .filter(SandwichModel.block_number == block_number)
        .delete()
    )

    db_session.commit()


def write_sandwiches(
    db_session,
    sandwiches: List[Sandwich],
) -> None:
    sandwich_models = []
    sandwiched_swaps = []

    for sandwich in sandwiches:
        sandwich_id = str(uuid4())
        sandwich_models.append(
            SandwichModel(
                id=sandwich_id,
                block_number=sandwich.block_number,
                sandwicher_address=sandwich.sandwicher_address,
                frontrun_swap_transaction_hash=sandwich.frontrun_swap.transaction_hash,
                frontrun_swap_trace_address=sandwich.frontrun_swap.trace_address,
                backrun_swap_transaction_hash=sandwich.backrun_swap.transaction_hash,
                backrun_swap_trace_address=sandwich.backrun_swap.trace_address,
            )
        )

        for swap in sandwich.sandwiched_swaps:
            sandwiched_swaps.append(
                {
                    "sandwich_id": sandwich_id,
                    "block_number": swap.block_number,
                    "transaction_hash": swap.transaction_hash,
                    "trace_address": swap.trace_address,
                }
            )

    if len(sandwich_models) > 0:
        db_session.bulk_save_objects(sandwich_models)
        db_session.execute(
            """
            INSERT INTO sandwiched_swaps
            (sandwich_id, block_number, transaction_hash, trace_address)
            VALUES
            (:sandwich_id, :block_number, :transaction_hash, :trace_address)
            """,
            params=sandwiched_swaps,
        )

    db_session.commit()
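Note that write_sandwiches inserts the per-victim rows with raw SQL, so the commit assumes a sandwiched_swaps table already exists; its definition is not part of this diff. As a rough sketch only, inferred from the column names in the INSERT statement (the class name, primary-key choice, and created_at column are assumptions), a matching model in mev_inspect/models could look like:

from sqlalchemy import ARRAY, TIMESTAMP, Column, Integer, Numeric, String, func

from mev_inspect.models.base import Base


class SandwichedSwapModel(Base):
    # Hypothetical sketch of the join table targeted by the raw INSERT above.
    __tablename__ = "sandwiched_swaps"

    created_at = Column(TIMESTAMP, server_default=func.now())

    sandwich_id = Column(String, primary_key=True)
    block_number = Column(Numeric, nullable=False)
    transaction_hash = Column(String(256), primary_key=True)
    trace_address = Column(ARRAY(Integer), primary_key=True)

Whether the real table carries additional columns or constraints is not visible from this commit.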

View File

@ -34,6 +34,7 @@ from mev_inspect.crud.transfers import delete_transfers_for_block, write_transfers
from mev_inspect.liquidations import get_liquidations
from mev_inspect.miner_payments import get_miner_payments
from mev_inspect.punks import get_punk_bid_acceptances, get_punk_bids, get_punk_snipes
from mev_inspect.sandwiches import get_sandwiches
from mev_inspect.swaps import get_swaps
from mev_inspect.transfers import get_transfers
@ -99,6 +100,12 @@ async def inspect_block(
    delete_liquidations_for_block(inspect_db_session, block_number)
    write_liquidations(inspect_db_session, liquidations)

    sandwiches = get_sandwiches(swaps)
    logger.info(f"Block: {block_number} -- Found {len(sandwiches)} sandwiches")
    delete_sandwiches_for_block(inspect_db_session, block_number)
    write_sandwiches(inspect_db_session, sandwiches)

    punk_bids = get_punk_bids(classified_traces)
    delete_punk_bids_for_block(inspect_db_session, block_number)
    write_punk_bids(inspect_db_session, punk_bids)

View File

@ -0,0 +1,16 @@
from sqlalchemy import Column, Numeric, String, func, TIMESTAMP, ARRAY, Integer

from .base import Base


class SandwichModel(Base):
    __tablename__ = "sandwiches"

    id = Column(String, primary_key=True)
    created_at = Column(TIMESTAMP, server_default=func.now())

    block_number = Column(Numeric, nullable=False)
    sandwicher_address = Column(String(256), nullable=False)
    frontrun_swap_transaction_hash = Column(String(256), nullable=False)
    frontrun_swap_trace_address = Column(ARRAY(Integer), nullable=False)
    backrun_swap_transaction_hash = Column(String(256), nullable=False)
    backrun_swap_trace_address = Column(ARRAY(Integer), nullable=False)
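For reference, reading these rows back is a standard SQLAlchemy query. A minimal sketch, assuming db_session is an open session for the inspect database (the helper name is illustrative and not part of this commit):

def get_sandwiches_for_block(db_session, block_number: int):
    # Illustrative read-side helper built on the model above.
    return (
        db_session.query(SandwichModel)
        .filter(SandwichModel.block_number == block_number)
        .all()
    )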

mev_inspect/sandwiches.py Normal file
View File

@ -0,0 +1,59 @@
from typing import List, Optional

from mev_inspect.schemas.sandwiches import Sandwich
from mev_inspect.schemas.swaps import Swap


def get_sandwiches(swaps: List[Swap]) -> List[Sandwich]:
    ordered_swaps = list(
        sorted(
            swaps,
            key=lambda swap: (swap.transaction_position, swap.trace_address),
        )
    )

    sandwiches: List[Sandwich] = []

    for index, swap in enumerate(ordered_swaps):
        rest_swaps = ordered_swaps[index + 1 :]
        sandwich = _get_sandwich_starting_with_swap(swap, rest_swaps)

        if sandwich is not None:
            sandwiches.append(sandwich)

    return sandwiches


def _get_sandwich_starting_with_swap(
    front_swap: Swap,
    rest_swaps: List[Swap],
) -> Optional[Sandwich]:
    sandwicher_address = front_swap.from_address
    sandwiched_swaps = []

    for other_swap in rest_swaps:
        if other_swap.transaction_hash == front_swap.transaction_hash:
            continue

        if other_swap.contract_address == front_swap.contract_address:
            if (
                other_swap.token_in_address == front_swap.token_in_address
                and other_swap.token_out_address == front_swap.token_out_address
                and other_swap.from_address != sandwicher_address
            ):
                sandwiched_swaps.append(other_swap)

            elif (
                other_swap.token_out_address == front_swap.token_in_address
                and other_swap.token_in_address == front_swap.token_out_address
                and other_swap.from_address == sandwicher_address
            ):
                return Sandwich(
                    block_number=front_swap.block_number,
                    sandwicher_address=sandwicher_address,
                    frontrun_swap=front_swap,
                    backrun_swap=other_swap,
                    sandwiched_swaps=sandwiched_swaps,
                )

    return None
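The matching rule above is: a later swap in the same pool, trading the same token direction but sent from a different address, is a sandwiched victim; the first later swap in the reverse direction from the sandwicher's own address closes the sandwich as the backrun. A minimal, self-contained sketch of that behaviour follows. The Swap field names are taken from the test fixture further down; the addresses, hashes, and amounts are invented, and any additional required Swap fields would need to be filled in as well.

from mev_inspect.sandwiches import get_sandwiches
from mev_inspect.schemas.swaps import Swap

POOL = "0xpool"
ATTACKER = "0xattacker"
VICTIM = "0xvictim"
WETH = "0xweth"
TOKEN = "0xtoken"


def _swap(position, from_address, token_in, token_out, amount_in, amount_out):
    # Helper that fills the Swap fields not relevant to the example with placeholders.
    return Swap(
        abi_name="UniswapV2Pair",
        transaction_hash=f"0xtx{position}",
        transaction_position=position,
        block_number=1,
        trace_address=[0],
        contract_address=POOL,
        from_address=from_address,
        to_address=from_address,
        token_in_address=token_in,
        token_in_amount=amount_in,
        token_out_address=token_out,
        token_out_amount=amount_out,
    )


frontrun = _swap(0, ATTACKER, WETH, TOKEN, 100, 1000)  # sandwicher buys TOKEN
victim = _swap(1, VICTIM, WETH, TOKEN, 50, 450)  # victim buys in the same direction
backrun = _swap(2, ATTACKER, TOKEN, WETH, 1000, 105)  # sandwicher sells TOKEN back

sandwiches = get_sandwiches([frontrun, victim, backrun])
assert len(sandwiches) == 1
assert sandwiches[0].sandwiched_swaps == [victim]

Because swaps sharing the frontrun's transaction hash are skipped, multi-hop swaps inside the sandwicher's own transaction cannot be mistaken for victims.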

View File

@ -0,0 +1,13 @@
from typing import List

from pydantic import BaseModel

from .swaps import Swap


class Sandwich(BaseModel):
    block_number: int
    sandwicher_address: str
    frontrun_swap: Swap
    backrun_swap: Swap
    sandwiched_swaps: List[Swap]
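Because Sandwich is a plain pydantic model, fixtures like the JSON file below can be generated straight from detected sandwiches. A minimal sketch, assuming pydantic v1 (the helper name and output path are illustrative, not part of this commit):

import json
from typing import List

from pydantic.json import pydantic_encoder

from mev_inspect.schemas.sandwiches import Sandwich


def dump_sandwiches(sandwiches: List[Sandwich], path: str) -> None:
    # pydantic_encoder serializes the nested Swap models (and any enum fields).
    with open(path, "w") as file:
        json.dump(sandwiches, file, default=pydantic_encoder, indent=4)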

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,116 @@
[{
"block_number": 12775690,
"sandwicher_address": "0x000000000027d2efc283613d0c3e24a8b430c4d8",
"frontrun_swap": {
"abi_name": "UniswapV2Pair",
"transaction_hash": "0x91a3abe5f3b806426542252820ba0ab6d56c098fdef6864ecaf4d352f64217a0",
"transaction_position": 2,
"block_number": 12775690,
"trace_address": [
0,
2
],
"contract_address": "0xefb47fcfcad4f96c83d4ca676842fb03ef20a477",
"from_address": "0x000000000027d2efc283613d0c3e24a8b430c4d8",
"to_address": "0x000000000027d2efc283613d0c3e24a8b430c4d8",
"token_in_address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2",
"token_in_amount": 12108789017249529876,
"token_out_address": "0x9813037ee2218799597d83d4a5b6f3b6778218d9",
"token_out_amount": 1114969767487478978357,
"protocol": null,
"error": null
},
"backrun_swap": {
"abi_name": "UniswapV2Pair",
"transaction_hash": "0xc300d1ff79d3901b58dc56489fc7d083a6c13d422bfc1425a0579379300c95a2",
"transaction_position": 7,
"block_number": 12775690,
"trace_address": [
0,
3
],
"contract_address": "0xefb47fcfcad4f96c83d4ca676842fb03ef20a477",
"from_address": "0x000000000027d2efc283613d0c3e24a8b430c4d8",
"to_address": "0x000000000027d2efc283613d0c3e24a8b430c4d8",
"token_in_address": "0x9813037ee2218799597d83d4a5b6f3b6778218d9",
"token_in_amount": 1114969767487478978357,
"token_out_address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2",
"token_out_amount": 12158780499164852150,
"protocol": null,
"error": null
},
"sandwiched_swaps": [
{
"abi_name": "UniswapV2Pair",
"transaction_hash": "0x9b40deca1f53593b7631ca25485d0c6faf90279b9872845acfd5c98afb185934",
"transaction_position": 3,
"block_number": 12775690,
"trace_address": [
3
],
"contract_address": "0xefb47fcfcad4f96c83d4ca676842fb03ef20a477",
"from_address": "0x03f7724180aa6b939894b5ca4314783b0b36b329",
"to_address": "0x37e17e96736aee2ca809abd91e0f8744910ca19a",
"token_in_address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2",
"token_in_amount": 652974555369106606,
"token_out_address": "0x9813037ee2218799597d83d4a5b6f3b6778218d9",
"token_out_amount": 60000000000000000000,
"protocol": null,
"error": null
},
{
"abi_name": "UniswapV2Pair",
"transaction_hash": "0xf8e45a291cdab5e456375e4d7df30771670d504835c9332b32114e5bc4e315f9",
"transaction_position": 4,
"block_number": 12775690,
"trace_address": [
3
],
"contract_address": "0xefb47fcfcad4f96c83d4ca676842fb03ef20a477",
"from_address": "0x03f7724180aa6b939894b5ca4314783b0b36b329",
"to_address": "0xd3b7ddf9eb72837f0ee3d1d30dec0e45fbdf79b1",
"token_in_address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2",
"token_in_amount": 300000000000000000,
"token_out_address": "0x9813037ee2218799597d83d4a5b6f3b6778218d9",
"token_out_amount": 27561865602394087181,
"protocol": null,
"error": null
},
{
"abi_name": "UniswapV2Pair",
"transaction_hash": "0xdf63b22773b66cc41e00fd42c3b3c7f42912f87476ffe6d821e3f5c00284f00b",
"transaction_position": 5,
"block_number": 12775690,
"trace_address": [
3
],
"contract_address": "0xefb47fcfcad4f96c83d4ca676842fb03ef20a477",
"from_address": "0x03f7724180aa6b939894b5ca4314783b0b36b329",
"to_address": "0xcf99e104fdc46bea618d85ac5250067f19a56e41",
"token_in_address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2",
"token_in_amount": 125000000000000000,
"token_out_address": "0x9813037ee2218799597d83d4a5b6f3b6778218d9",
"token_out_amount": 11483313070817976324,
"protocol": null,
"error": null
},
{
"abi_name": "UniswapV2Pair",
"transaction_hash": "0x1fe35f66e24f12bdb54a0d35934aac809c783710d998621b70116ea9f95f4f4f",
"transaction_position": 6,
"block_number": 12775690,
"trace_address": [
3
],
"contract_address": "0xefb47fcfcad4f96c83d4ca676842fb03ef20a477",
"from_address": "0x03f7724180aa6b939894b5ca4314783b0b36b329",
"to_address": "0xd7c9f3010efdff665ee72580ffa7b4141e56b17e",
"token_in_address": "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2",
"token_in_amount": 30000000000000000000,
"token_out_address": "0x9813037ee2218799597d83d4a5b6f3b6778218d9",
"token_out_amount": 2742522049933966038599,
"protocol": null,
"error": null
}
]
}]

View File

@ -0,0 +1,19 @@
from mev_inspect.classifiers.trace import TraceClassifier
from mev_inspect.sandwiches import get_sandwiches
from mev_inspect.swaps import get_swaps

from .utils import load_test_block, load_test_sandwiches


def test_sandwich_real_block():
    block = load_test_block(12775690)
    expected_sandwiches = load_test_sandwiches(12775690)

    trace_classifier = TraceClassifier()
    classified_traces = trace_classifier.classify(block.traces)

    swaps = get_swaps(classified_traces)
    assert len(swaps) == 21

    sandwiches = get_sandwiches(list(swaps))
    assert sandwiches == expected_sandwiches

View File

@ -1,19 +1,34 @@
 import json
 import os
-from typing import Dict
+from typing import Dict, List
+from pydantic import parse_file_as
 from mev_inspect.schemas.blocks import Block
+from mev_inspect.schemas.sandwiches import Sandwich
 THIS_FILE_DIRECTORY = os.path.dirname(__file__)
 TEST_BLOCKS_DIRECTORY = os.path.join(THIS_FILE_DIRECTORY, "blocks")
+TEST_SANDWICHES_DIRECTORY = os.path.join(THIS_FILE_DIRECTORY, "sandwiches")
+def load_test_sandwiches(block_number: int) -> List[Sandwich]:
+    sandwiches_path = f"{TEST_SANDWICHES_DIRECTORY}/{block_number}.json"
+    return parse_file_as(List[Sandwich], sandwiches_path)
 def load_test_block(block_number: int) -> Block:
     block_path = f"{TEST_BLOCKS_DIRECTORY}/{block_number}.json"
+    defaults = {"block_timestamp": 0}
     with open(block_path, "r") as block_file:
         block_json = json.load(block_file)
-        return Block(**block_json, block_timestamp=0)
+        return Block(
+            **{
+                **defaults,
+                **block_json,
+            }
+        )
 def load_comp_markets() -> Dict[str, str]: