Merge
commit edc40a3106

@@ -0,0 +1,23 @@
"""Add error column to liquidations

Revision ID: 4b9d289f2d74
Revises: 99d376cb93cc
Create Date: 2021-12-23 14:54:28.406159

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "4b9d289f2d74"
down_revision = "99d376cb93cc"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column("liquidations", sa.Column("error", sa.String(256), nullable=True))


def downgrade():
    op.drop_column("liquidations", "error")
alembic/versions/99d376cb93cc_error.py (new file)
@@ -0,0 +1,23 @@
"""error column

Revision ID: 99d376cb93cc
Revises: c4a7620a2d33
Create Date: 2021-12-21 21:26:12.142484

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "99d376cb93cc"
down_revision = "c4a7620a2d33"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column("arbitrages", sa.Column("error", sa.String(256), nullable=True))


def downgrade():
    op.drop_column("arbitrages", "error")
alembic/versions/c4a7620a2d33_create_tokens_table.py (new file)
@@ -0,0 +1,28 @@
"""Create tokens table

Revision ID: c4a7620a2d33
Revises: 15ba9c27ee8a
Create Date: 2021-12-21 19:12:33.940117

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "c4a7620a2d33"
down_revision = "15ba9c27ee8a"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "tokens",
        sa.Column("token_address", sa.String(256), nullable=False),
        sa.Column("decimals", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("token_address"),
    )


def downgrade():
    op.drop_table("tokens")
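The create-tokens-table migration only defines the table at the database level; no matching ORM model appears anywhere in this diff. For orientation, a minimal self-contained sketch of a model with the same columns could look like the following (the class and module layout are assumptions for illustration, not part of the commit):

from sqlalchemy import Column, Numeric, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class TokenModel(Base):
    """Mirrors the columns created by the c4a7620a2d33 migration."""

    __tablename__ = "tokens"

    token_address = Column(String(256), primary_key=True)
    decimals = Column(Numeric, nullable=False)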
listener

@@ -23,6 +23,7 @@ case "$1" in
  stop)
        echo -n "Stopping daemon: "$NAME
        start-stop-daemon --stop --quiet --oknodo --pidfile $PIDFILE
+       rm $PIDFILE
        echo "."
        ;;
  tail)

@@ -31,6 +32,7 @@ case "$1" in
  restart)
        echo -n "Restarting daemon: "$NAME
        start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile $PIDFILE
+       rm $PIDFILE
        start-stop-daemon \
            --background \
            --chdir /app \
@@ -65,14 +65,10 @@ async def inspect_next_block(

     if last_written_block is None:
         # maintain lag if no blocks written yet
-        last_written_block = latest_block_number - 1
+        last_written_block = latest_block_number - BLOCK_NUMBER_LAG - 1

     if last_written_block < (latest_block_number - BLOCK_NUMBER_LAG):
-        block_number = (
-            latest_block_number
-            if last_written_block is None
-            else last_written_block + 1
-        )
+        block_number = last_written_block + 1

         logger.info(f"Writing block: {block_number}")
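The net effect of the two changes above is that the very first write also respects the lag, and each subsequent write advances by exactly one block. A small self-contained sketch of the same arithmetic (BLOCK_NUMBER_LAG = 5 is an assumed value for illustration only):

BLOCK_NUMBER_LAG = 5


def next_block_to_write(last_written_block, latest_block_number):
    # Mirrors the updated inspect_next_block logic: stay BLOCK_NUMBER_LAG blocks behind the head.
    if last_written_block is None:
        # maintain lag if no blocks written yet
        last_written_block = latest_block_number - BLOCK_NUMBER_LAG - 1

    if last_written_block < (latest_block_number - BLOCK_NUMBER_LAG):
        return last_written_block + 1
    return None


assert next_block_to_write(None, 1000) == 995   # first write lands exactly LAG blocks behind the head
assert next_block_to_write(995, 1000) is None   # already caught up to the lagged head
assert next_block_to_write(990, 1000) == 991    # otherwise advance one block at a time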
mev

@@ -13,7 +13,7 @@ function db(){
    username=$(get_kube_db_secret "username")
    password=$(get_kube_db_secret "password")

-    kubectl run -i --rm --tty postgres-client \
+    kubectl run -i --rm --tty postgres-client-$RANDOM \
        --env="PGPASSWORD=$password" \
        --image=jbergknoff/postgresql-client \
        -- $DB_NAME --host=$host --user=$username
@@ -70,6 +70,7 @@ def get_aave_liquidations(
                     transaction_hash=trace.transaction_hash,
                     trace_address=trace.trace_address,
                     block_number=trace.block_number,
+                    error=trace.error,
                 )
             )
@@ -1,5 +1,5 @@
 from itertools import groupby
-from typing import List, Tuple
+from typing import List, Optional, Tuple

 from mev_inspect.schemas.arbitrages import Arbitrage
 from mev_inspect.schemas.swaps import Swap

@@ -45,17 +45,23 @@ def _get_arbitrages_from_swaps(swaps: List[Swap]) -> List[Arbitrage]:
     if len(start_ends) == 0:
         return []

-    # for (start, end) in filtered_start_ends:
-    for (start, end) in start_ends:
-        potential_intermediate_swaps = [
-            swap for swap in swaps if swap is not start and swap is not end
-        ]
-        routes = _get_all_routes(start, end, potential_intermediate_swaps)
-
-        for route in routes:
+    used_swaps: List[Swap] = []
+
+    for (start, ends) in start_ends:
+        if start in used_swaps:
+            continue
+
+        unused_ends = [end for end in ends if end not in used_swaps]
+        route = _get_shortest_route(start, unused_ends, swaps)
+
+        if route is not None:
             start_amount = route[0].token_in_amount
             end_amount = route[-1].token_out_amount
             profit_amount = end_amount - start_amount
+            error = None
+            for swap in route:
+                if swap.error is not None:
+                    error = swap.error

             arb = Arbitrage(
                 swaps=route,

@@ -66,8 +72,12 @@ def _get_arbitrages_from_swaps(swaps: List[Swap]) -> List[Arbitrage]:
                 start_amount=start_amount,
                 end_amount=end_amount,
                 profit_amount=profit_amount,
+                error=error,
             )

             all_arbitrages.append(arb)
+            used_swaps.extend(route)

     if len(all_arbitrages) == 1:
         return all_arbitrages
     else:

@@ -78,18 +88,63 @@ def _get_arbitrages_from_swaps(swaps: List[Swap]) -> List[Arbitrage]:
     ]


-def _get_all_start_end_swaps(swaps: List[Swap]) -> List[Tuple[Swap, Swap]]:
+def _get_shortest_route(
+    start_swap: Swap,
+    end_swaps: List[Swap],
+    all_swaps: List[Swap],
+) -> Optional[List[Swap]]:
+    for end_swap in end_swaps:
+        if start_swap.token_out_address == end_swap.token_in_address:
+            return [start_swap, end_swap]
+
+    other_swaps = [
+        swap for swap in all_swaps if (swap is not start_swap and swap not in end_swaps)
+    ]
+
+    if len(other_swaps) == 0:
+        return None
+
+    shortest_remaining_route = None
+
+    for next_swap in other_swaps:
+        if start_swap.token_out_address == next_swap.token_in_address and (
+            start_swap.contract_address == next_swap.from_address
+            or start_swap.to_address == next_swap.contract_address
+            or start_swap.to_address == next_swap.from_address
+        ):
+            shortest_from_next = _get_shortest_route(
+                next_swap,
+                end_swaps,
+                other_swaps,
+            )
+
+            if shortest_from_next is not None and (
+                shortest_remaining_route is None
+                or len(shortest_from_next) < len(shortest_remaining_route)
+            ):
+                shortest_remaining_route = shortest_from_next
+
+    if shortest_remaining_route is None:
+        return None
+    else:
+        return [start_swap] + shortest_remaining_route
+
+
+def _get_all_start_end_swaps(swaps: List[Swap]) -> List[Tuple[Swap, List[Swap]]]:
     """
-    Gets the set of all possible opening and closing swap pairs in an arbitrage via
+    Gets the set of all possible openings and corresponding closing swaps for an arbitrage via
     - swap[start].token_in == swap[end].token_out
     - swap[start].from_address == swap[end].to_address
     - not swap[start].from_address in all_pool_addresses
     - not swap[end].to_address in all_pool_addresses
     """
     pool_addrs = [swap.contract_address for swap in swaps]
-    valid_start_ends: List[Tuple[Swap, Swap]] = []
+    valid_start_ends: List[Tuple[Swap, List[Swap]]] = []

     for index, potential_start_swap in enumerate(swaps):
+        ends_for_start: List[Swap] = []
         remaining_swaps = swaps[:index] + swaps[index + 1 :]

         for potential_end_swap in remaining_swaps:
             if (
                 potential_start_swap.token_in_address

@@ -97,38 +152,10 @@ def _get_all_start_end_swaps(swaps: List[Swap]) -> List[Tuple[Swap, Swap]]:
                 and potential_start_swap.from_address == potential_end_swap.to_address
                 and not potential_start_swap.from_address in pool_addrs
             ):
-                valid_start_ends.append((potential_start_swap, potential_end_swap))
+                ends_for_start.append(potential_end_swap)
+
+        if len(ends_for_start) > 0:
+            valid_start_ends.append((potential_start_swap, ends_for_start))

     return valid_start_ends


-def _get_all_routes(
-    start_swap: Swap, end_swap: Swap, other_swaps: List[Swap]
-) -> List[List[Swap]]:
-    """
-    Returns all routes (List[Swap]) from start to finish between a start_swap and an end_swap only accounting for token_address_in and token_address_out.
-    """
-    # If the path is complete, return
-    if start_swap.token_out_address == end_swap.token_in_address:
-        return [[start_swap, end_swap]]
-    elif len(other_swaps) == 0:
-        return []
-
-    # Collect all potential next steps, check if valid, recursively find routes from next_step to end_swap
-    routes: List[List[Swap]] = []
-    for potential_next_swap in other_swaps:
-        if start_swap.token_out_address == potential_next_swap.token_in_address and (
-            start_swap.contract_address == potential_next_swap.from_address
-            or start_swap.to_address == potential_next_swap.contract_address
-            or start_swap.to_address == potential_next_swap.from_address
-        ):
-            remaining_swaps = [
-                swap for swap in other_swaps if swap != potential_next_swap
-            ]
-            next_swap_routes = _get_all_routes(
-                potential_next_swap, end_swap, remaining_swaps
-            )
-            if len(next_swap_routes) > 0:
-                for next_swap_route in next_swap_routes:
-                    next_swap_route.insert(0, start_swap)
-                    routes.append(next_swap_route)
-    return routes
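A rough way to see what _get_shortest_route does: it returns the fewest-hop chain of swaps in which each swap's token_out feeds the next swap's token_in, preferring a direct start-to-end pair when one exists. The standalone sketch below reproduces that idea on plain (token_in, token_out) tuples rather than the project's Swap schema, and it deliberately ignores the contract/from/to address adjacency checks, so it is an illustration of the recursion only:

from typing import List, Optional, Tuple

# Each "swap" here is just (token_in, token_out); the real code also checks
# pool/contract address adjacency before following an edge.
Edge = Tuple[str, str]


def shortest_route(start: Edge, ends: List[Edge], others: List[Edge]) -> Optional[List[Edge]]:
    for end in ends:
        if start[1] == end[0]:  # start's token_out feeds an end's token_in
            return [start, end]

    remaining = [swap for swap in others if swap != start and swap not in ends]
    best: Optional[List[Edge]] = None
    for nxt in remaining:
        if start[1] == nxt[0]:
            tail = shortest_route(nxt, ends, [s for s in remaining if s != nxt])
            if tail is not None and (best is None or len(tail) < len(best)):
                best = tail
    return None if best is None else [start] + best


# A -> B with the cycle closed by D -> A; B -> D is preferred over the longer B -> C -> D
assert shortest_route(("A", "B"), [("D", "A")], [("B", "C"), ("C", "D"), ("B", "D")]) == [
    ("A", "B"),
    ("B", "D"),
    ("D", "A"),
]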
@@ -4,10 +4,13 @@ from mev_inspect.classifiers.specs.weth import WETH_ADDRESS
 from mev_inspect.schemas.coinbase import CoinbasePrices, CoinbasePricesResponse
 from mev_inspect.schemas.prices import (
     AAVE_TOKEN_ADDRESS,
+    CDAI_TOKEN_ADDRESS,
+    CUSDC_TOKEN_ADDRESS,
+    DAI_TOKEN_ADDRESS,
     LINK_TOKEN_ADDRESS,
     REN_TOKEN_ADDRESS,
     UNI_TOKEN_ADDRESS,
-    USDC_TOKEN_ADDRESS_ADDRESS,
+    USDC_TOKEN_ADDRESS,
     WBTC_TOKEN_ADDRESS,
     YEARN_TOKEN_ADDRESS,
 )

@@ -22,8 +25,11 @@ COINBASE_TOKEN_NAME_BY_ADDRESS = {
     YEARN_TOKEN_ADDRESS: "yearn-finance",
     AAVE_TOKEN_ADDRESS: "aave",
     UNI_TOKEN_ADDRESS: "uniswap",
-    USDC_TOKEN_ADDRESS_ADDRESS: "usdc",
+    USDC_TOKEN_ADDRESS: "usdc",
+    DAI_TOKEN_ADDRESS: "dai",
     REN_TOKEN_ADDRESS: "ren",
+    CUSDC_TOKEN_ADDRESS: "compound-usd-coin",
+    CDAI_TOKEN_ADDRESS: "compound-dai",
 }
@@ -48,6 +48,7 @@ def get_compound_liquidations(
                     transaction_hash=trace.transaction_hash,
                     trace_address=trace.trace_address,
                     block_number=trace.block_number,
+                    error=trace.error,
                 )
             )
         elif (

@@ -65,6 +66,7 @@ def get_compound_liquidations(
                     transaction_hash=trace.transaction_hash,
                     trace_address=trace.trace_address,
                     block_number=trace.block_number,
+                    error=trace.error,
                 )
             )
     return liquidations
@@ -4,17 +4,20 @@ from uuid import uuid4
 from mev_inspect.models.arbitrages import ArbitrageModel
 from mev_inspect.schemas.arbitrages import Arbitrage

+from .shared import delete_by_block_range

-def delete_arbitrages_for_block(
+
+def delete_arbitrages_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(ArbitrageModel)
-        .filter(ArbitrageModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        ArbitrageModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()

@@ -37,6 +40,7 @@ def write_arbitrages(
             start_amount=arbitrage.start_amount,
             end_amount=arbitrage.end_amount,
             profit_amount=arbitrage.profit_amount,
+            error=arbitrage.error,
         )
     )
@@ -3,13 +3,22 @@ from datetime import datetime
 from mev_inspect.schemas.blocks import Block


-def delete_block(
+def delete_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
     db_session.execute(
-        "DELETE FROM blocks WHERE block_number = :block_number",
-        params={"block_number": block_number},
+        """
+        DELETE FROM blocks
+        WHERE
+            block_number >= :after_block_number AND
+            block_number < :before_block_number
+        """,
+        params={
+            "after_block_number": after_block_number,
+            "before_block_number": before_block_number,
+        },
     )
     db_session.commit()
@@ -4,17 +4,20 @@ from typing import List
 from mev_inspect.models.liquidations import LiquidationModel
 from mev_inspect.schemas.liquidations import Liquidation

+from .shared import delete_by_block_range

-def delete_liquidations_for_block(
+
+def delete_liquidations_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(LiquidationModel)
-        .filter(LiquidationModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        LiquidationModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()
@@ -4,17 +4,20 @@ from typing import List
 from mev_inspect.models.miner_payments import MinerPaymentModel
 from mev_inspect.schemas.miner_payments import MinerPayment

+from .shared import delete_by_block_range

-def delete_miner_payments_for_block(
+
+def delete_miner_payments_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(MinerPaymentModel)
-        .filter(MinerPaymentModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        MinerPaymentModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()
@@ -1,20 +1,22 @@
 import json
 from typing import List

+from mev_inspect.crud.shared import delete_by_block_range
 from mev_inspect.models.nft_trades import NftTradeModel
 from mev_inspect.schemas.nft_trades import NftTrade


-def delete_nft_trades_for_block(
+def delete_nft_trades_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(NftTradeModel)
-        .filter(NftTradeModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        NftTradeModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()
@@ -10,17 +10,20 @@ from mev_inspect.schemas.punk_accept_bid import PunkBidAcceptance
 from mev_inspect.schemas.punk_bid import PunkBid
 from mev_inspect.schemas.punk_snipe import PunkSnipe

+from .shared import delete_by_block_range

-def delete_punk_bid_acceptances_for_block(
+
+def delete_punk_bid_acceptances_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(PunkBidAcceptanceModel)
-        .filter(PunkBidAcceptanceModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        PunkBidAcceptanceModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()

@@ -37,16 +40,17 @@ def write_punk_bid_acceptances(
     db_session.commit()


-def delete_punk_bids_for_block(
+def delete_punk_bids_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(PunkBidModel)
-        .filter(PunkBidModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        PunkBidModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()

@@ -60,16 +64,17 @@ def write_punk_bids(
     db_session.commit()


-def delete_punk_snipes_for_block(
+def delete_punk_snipes_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(PunkSnipeModel)
-        .filter(PunkSnipeModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        PunkSnipeModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()
@@ -4,17 +4,20 @@ from uuid import uuid4
 from mev_inspect.models.sandwiches import SandwichModel
 from mev_inspect.schemas.sandwiches import Sandwich

+from .shared import delete_by_block_range

-def delete_sandwiches_for_block(
+
+def delete_sandwiches_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(SandwichModel)
-        .filter(SandwichModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        SandwichModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()
mev_inspect/crud/shared.py (new file)
@@ -0,0 +1,20 @@
from typing import Type

from mev_inspect.models.base import Base


def delete_by_block_range(
    db_session,
    model_class: Type[Base],
    after_block_number,
    before_block_number,
) -> None:
    (
        db_session.query(model_class)
        .filter(model_class.block_number >= after_block_number)
        .filter(model_class.block_number < before_block_number)
        .delete()
    )

    db_session.commit()
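All of the per-table delete_*_for_blocks helpers above now route through this single function, and the range is half-open: rows with after_block_number <= block_number < before_block_number are removed. A self-contained sketch of the same filter against an in-memory SQLite database (the model and data here are made up purely for the demo, not part of the project):

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class DemoModel(Base):
    __tablename__ = "demo"
    id = Column(Integer, primary_key=True)
    block_number = Column(Integer, nullable=False)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(DemoModel(block_number=n) for n in range(100, 110))
    session.commit()

    # same half-open filter as delete_by_block_range(session, DemoModel, 103, 106)
    (
        session.query(DemoModel)
        .filter(DemoModel.block_number >= 103)
        .filter(DemoModel.block_number < 106)
        .delete()
    )
    session.commit()

    remaining = [m.block_number for m in session.query(DemoModel).order_by(DemoModel.block_number)]
    assert remaining == [100, 101, 102, 106, 107, 108, 109]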
@@ -4,17 +4,20 @@ from typing import List
 from mev_inspect.models.swaps import SwapModel
 from mev_inspect.schemas.swaps import Swap

+from .shared import delete_by_block_range

-def delete_swaps_for_block(
+
+def delete_swaps_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(SwapModel)
-        .filter(SwapModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        SwapModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()
@@ -4,15 +4,19 @@ from typing import List
 from mev_inspect.models.traces import ClassifiedTraceModel
 from mev_inspect.schemas.traces import ClassifiedTrace

+from .shared import delete_by_block_range

-def delete_classified_traces_for_block(
+
+def delete_classified_traces_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(ClassifiedTraceModel)
-        .filter(ClassifiedTraceModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        ClassifiedTraceModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()
@@ -4,15 +4,19 @@ from typing import List
 from mev_inspect.models.transfers import TransferModel
 from mev_inspect.schemas.transfers import Transfer

+from .shared import delete_by_block_range

-def delete_transfers_for_block(
+
+def delete_transfers_for_blocks(
     db_session,
-    block_number: int,
+    after_block_number: int,
+    before_block_number: int,
 ) -> None:
-    (
-        db_session.query(TransferModel)
-        .filter(TransferModel.block_number == block_number)
-        .delete()
+    delete_by_block_range(
+        db_session,
+        TransferModel,
+        after_block_number,
+        before_block_number,
     )

     db_session.commit()
@@ -12,7 +12,7 @@ def get_trace_database_uri() -> Optional[str]:
     db_name = "trace_db"

     if all(field is not None for field in [username, password, host]):
-        return f"postgresql://{username}:{password}@{host}/{db_name}"
+        return f"postgresql+psycopg2://{username}:{password}@{host}/{db_name}"

     return None

@@ -22,11 +22,16 @@ def get_inspect_database_uri():
     password = os.getenv("POSTGRES_PASSWORD")
     host = os.getenv("POSTGRES_HOST")
     db_name = "mev_inspect"
-    return f"postgresql://{username}:{password}@{host}/{db_name}"
+    return f"postgresql+psycopg2://{username}:{password}@{host}/{db_name}"


 def _get_engine(uri: str):
-    return create_engine(uri)
+    return create_engine(
+        uri,
+        executemany_mode="values",
+        executemany_values_page_size=10000,
+        executemany_batch_page_size=500,
+    )


 def _get_session(uri: str):
@@ -38,7 +38,7 @@ class ABIDecoder:

         try:
             decoded = decode_abi(types, hexstr_to_bytes(params))
-        except (InsufficientDataBytes, NonEmptyPaddingBytes):
+        except (InsufficientDataBytes, NonEmptyPaddingBytes, OverflowError):
             return None

         return CallData(
@@ -1,5 +1,5 @@
 import logging
-from typing import Optional
+from typing import List, Optional

 from sqlalchemy import orm
 from web3 import Web3

@@ -7,37 +7,49 @@ from web3 import Web3
 from mev_inspect.arbitrages import get_arbitrages
 from mev_inspect.block import create_from_block_number
 from mev_inspect.classifiers.trace import TraceClassifier
-from mev_inspect.crud.arbitrages import delete_arbitrages_for_block, write_arbitrages
-from mev_inspect.crud.blocks import delete_block, write_block
+from mev_inspect.crud.arbitrages import delete_arbitrages_for_blocks, write_arbitrages
+from mev_inspect.crud.blocks import delete_blocks, write_block
 from mev_inspect.crud.liquidations import (
-    delete_liquidations_for_block,
+    delete_liquidations_for_blocks,
     write_liquidations,
 )
 from mev_inspect.crud.miner_payments import (
-    delete_miner_payments_for_block,
+    delete_miner_payments_for_blocks,
     write_miner_payments,
 )
-from mev_inspect.crud.nft_trades import delete_nft_trades_for_block, write_nft_trades
+from mev_inspect.crud.nft_trades import delete_nft_trades_for_blocks, write_nft_trades
 from mev_inspect.crud.punks import (
-    delete_punk_bid_acceptances_for_block,
-    delete_punk_bids_for_block,
-    delete_punk_snipes_for_block,
+    delete_punk_bid_acceptances_for_blocks,
+    delete_punk_bids_for_blocks,
+    delete_punk_snipes_for_blocks,
     write_punk_bid_acceptances,
     write_punk_bids,
     write_punk_snipes,
 )
-from mev_inspect.crud.sandwiches import delete_sandwiches_for_block, write_sandwiches
-from mev_inspect.crud.swaps import delete_swaps_for_block, write_swaps
+from mev_inspect.crud.sandwiches import delete_sandwiches_for_blocks, write_sandwiches
+from mev_inspect.crud.swaps import delete_swaps_for_blocks, write_swaps
 from mev_inspect.crud.traces import (
-    delete_classified_traces_for_block,
+    delete_classified_traces_for_blocks,
     write_classified_traces,
 )
-from mev_inspect.crud.transfers import delete_transfers_for_block, write_transfers
+from mev_inspect.crud.transfers import delete_transfers_for_blocks, write_transfers
 from mev_inspect.liquidations import get_liquidations
 from mev_inspect.miner_payments import get_miner_payments
 from mev_inspect.nft_trades import get_nft_trades
 from mev_inspect.punks import get_punk_bid_acceptances, get_punk_bids, get_punk_snipes
 from mev_inspect.sandwiches import get_sandwiches
+from mev_inspect.schemas.arbitrages import Arbitrage
+from mev_inspect.schemas.blocks import Block
+from mev_inspect.schemas.liquidations import Liquidation
+from mev_inspect.schemas.miner_payments import MinerPayment
+from mev_inspect.schemas.nft_trades import NftTrade
+from mev_inspect.schemas.punk_accept_bid import PunkBidAcceptance
+from mev_inspect.schemas.punk_bid import PunkBid
+from mev_inspect.schemas.punk_snipe import PunkSnipe
+from mev_inspect.schemas.sandwiches import Sandwich
+from mev_inspect.schemas.swaps import Swap
+from mev_inspect.schemas.traces import ClassifiedTrace
+from mev_inspect.schemas.transfers import Transfer
 from mev_inspect.swaps import get_swaps
 from mev_inspect.transfers import get_transfers

@@ -53,85 +65,166 @@ async def inspect_block(
     trace_db_session: Optional[orm.Session],
     should_write_classified_traces: bool = True,
 ):
-    block = await create_from_block_number(
+    await inspect_many_blocks(
+        inspect_db_session,
         base_provider,
         w3,
+        trace_classifier,
         block_number,
+        block_number + 1,
         trace_db_session,
+        should_write_classified_traces,
     )

-    logger.info(f"Block: {block_number} -- Total traces: {len(block.traces)}")
-
-    delete_block(inspect_db_session, block_number)
+
+async def inspect_many_blocks(
+    inspect_db_session: orm.Session,
+    base_provider,
+    w3: Web3,
+    trace_classifier: TraceClassifier,
+    after_block_number: int,
+    before_block_number: int,
+    trace_db_session: Optional[orm.Session],
+    should_write_classified_traces: bool = True,
+):
+    all_blocks: List[Block] = []
+    all_classified_traces: List[ClassifiedTrace] = []
+    all_transfers: List[Transfer] = []
+    all_swaps: List[Swap] = []
+    all_arbitrages: List[Arbitrage] = []
+    all_liqudations: List[Liquidation] = []
+    all_sandwiches: List[Sandwich] = []
+
+    all_punk_bids: List[PunkBid] = []
+    all_punk_bid_acceptances: List[PunkBidAcceptance] = []
+    all_punk_snipes: List[PunkSnipe] = []
+
+    all_miner_payments: List[MinerPayment] = []
+
+    all_nft_trades: List[NftTrade] = []
+
+    for block_number in range(after_block_number, before_block_number):
+        block = await create_from_block_number(
+            base_provider,
+            w3,
+            block_number,
+            trace_db_session,
+        )
+
+        logger.info(f"Block: {block_number} -- Total traces: {len(block.traces)}")
+
+        total_transactions = len(
+            set(
+                t.transaction_hash
+                for t in block.traces
+                if t.transaction_hash is not None
+            )
+        )
+        logger.info(
+            f"Block: {block_number} -- Total transactions: {total_transactions}"
+        )
+
+        classified_traces = trace_classifier.classify(block.traces)
+        logger.info(
+            f"Block: {block_number} -- Returned {len(classified_traces)} classified traces"
+        )
+
+        transfers = get_transfers(classified_traces)
+        logger.info(f"Block: {block_number} -- Found {len(transfers)} transfers")
+
+        swaps = get_swaps(classified_traces)
+        logger.info(f"Block: {block_number} -- Found {len(swaps)} swaps")
+
+        arbitrages = get_arbitrages(swaps)
+        logger.info(f"Block: {block_number} -- Found {len(arbitrages)} arbitrages")
+
+        liquidations = get_liquidations(classified_traces)
+        logger.info(f"Block: {block_number} -- Found {len(liquidations)} liquidations")
+
+        sandwiches = get_sandwiches(swaps)
+        logger.info(f"Block: {block_number} -- Found {len(sandwiches)} sandwiches")
+
+        punk_bids = get_punk_bids(classified_traces)
+        punk_bid_acceptances = get_punk_bid_acceptances(classified_traces)
+        punk_snipes = get_punk_snipes(punk_bids, punk_bid_acceptances)
+        logger.info(f"Block: {block_number} -- Found {len(punk_snipes)} punk snipes")
+
+        nft_trades = get_nft_trades(classified_traces)
+        logger.info(f"Block: {block_number} -- Found {len(nft_trades)} nft trades")
+
+        miner_payments = get_miner_payments(
+            block.miner, block.base_fee_per_gas, classified_traces, block.receipts
+        )
+
+        all_blocks.append(block)
+        all_classified_traces.extend(classified_traces)
+        all_transfers.extend(transfers)
+        all_swaps.extend(swaps)
+        all_arbitrages.extend(arbitrages)
+        all_liqudations.extend(liquidations)
+        all_sandwiches.extend(sandwiches)
+
+        all_punk_bids.extend(punk_bids)
+        all_punk_bid_acceptances.extend(punk_bid_acceptances)
+        all_punk_snipes.extend(punk_snipes)
+
+        all_nft_trades.extend(nft_trades)
+
+        all_miner_payments.extend(miner_payments)
+
+    delete_blocks(inspect_db_session, after_block_number, before_block_number)
     write_block(inspect_db_session, block)

-    total_transactions = len(
-        set(t.transaction_hash for t in block.traces if t.transaction_hash is not None)
-    )
-    logger.info(f"Block: {block_number} -- Total transactions: {total_transactions}")
-
-    classified_traces = trace_classifier.classify(block.traces)
-    logger.info(
-        f"Block: {block_number} -- Returned {len(classified_traces)} classified traces"
-    )
-
     if should_write_classified_traces:
-        delete_classified_traces_for_block(inspect_db_session, block_number)
+        delete_classified_traces_for_blocks(
+            inspect_db_session, after_block_number, before_block_number
+        )
         write_classified_traces(inspect_db_session, classified_traces)

-    transfers = get_transfers(classified_traces)
-    logger.info(f"Block: {block_number} -- Found {len(transfers)} transfers")
-    delete_transfers_for_block(inspect_db_session, block_number)
+    delete_transfers_for_blocks(
+        inspect_db_session, after_block_number, before_block_number
+    )
     write_transfers(inspect_db_session, transfers)

-    swaps = get_swaps(classified_traces)
-    logger.info(f"Block: {block_number} -- Found {len(swaps)} swaps")
-    delete_swaps_for_block(inspect_db_session, block_number)
+    delete_swaps_for_blocks(inspect_db_session, after_block_number, before_block_number)
     write_swaps(inspect_db_session, swaps)

-    arbitrages = get_arbitrages(swaps)
-    logger.info(f"Block: {block_number} -- Found {len(arbitrages)} arbitrages")
-    delete_arbitrages_for_block(inspect_db_session, block_number)
+    delete_arbitrages_for_blocks(
+        inspect_db_session, after_block_number, before_block_number
+    )
     write_arbitrages(inspect_db_session, arbitrages)

-    liquidations = get_liquidations(classified_traces)
-    logger.info(f"Block: {block_number} -- Found {len(liquidations)} liquidations")
-    delete_liquidations_for_block(inspect_db_session, block_number)
+    delete_liquidations_for_blocks(
+        inspect_db_session, after_block_number, before_block_number
+    )
     write_liquidations(inspect_db_session, liquidations)

-    sandwiches = get_sandwiches(swaps)
-    logger.info(f"Block: {block_number} -- Found {len(sandwiches)} sandwiches")
-    delete_sandwiches_for_block(inspect_db_session, block_number)
+    delete_sandwiches_for_blocks(
+        inspect_db_session, after_block_number, before_block_number
+    )
     write_sandwiches(inspect_db_session, sandwiches)

-    punk_bids = get_punk_bids(classified_traces)
-    delete_punk_bids_for_block(inspect_db_session, block_number)
+    delete_punk_bids_for_blocks(
+        inspect_db_session, after_block_number, before_block_number
+    )
     write_punk_bids(inspect_db_session, punk_bids)

-    punk_bid_acceptances = get_punk_bid_acceptances(classified_traces)
-    delete_punk_bid_acceptances_for_block(inspect_db_session, block_number)
+    delete_punk_bid_acceptances_for_blocks(
+        inspect_db_session, after_block_number, before_block_number
+    )
     write_punk_bid_acceptances(inspect_db_session, punk_bid_acceptances)

-    punk_snipes = get_punk_snipes(punk_bids, punk_bid_acceptances)
-    logger.info(f"Block: {block_number} -- Found {len(punk_snipes)} punk snipes")
-    delete_punk_snipes_for_block(inspect_db_session, block_number)
+    delete_punk_snipes_for_blocks(
+        inspect_db_session, after_block_number, before_block_number
+    )
     write_punk_snipes(inspect_db_session, punk_snipes)

-    nft_trades = get_nft_trades(classified_traces)
-    logger.info(f"Block: {block_number} -- Found {len(nft_trades)} nft trades")
-    delete_nft_trades_for_block(inspect_db_session, block_number)
+    delete_nft_trades_for_blocks(
+        inspect_db_session, after_block_number, before_block_number
+    )
     write_nft_trades(inspect_db_session, nft_trades)

-    miner_payments = get_miner_payments(
-        block.miner, block.base_fee_per_gas, classified_traces, block.receipts
-    )
-    delete_miner_payments_for_block(inspect_db_session, block_number)
+    delete_miner_payments_for_blocks(
+        inspect_db_session, after_block_number, before_block_number
+    )
     write_miner_payments(inspect_db_session, miner_payments)
@@ -10,7 +10,7 @@ from web3.eth import AsyncEth

 from mev_inspect.block import create_from_block_number
 from mev_inspect.classifiers.trace import TraceClassifier
-from mev_inspect.inspect_block import inspect_block
+from mev_inspect.inspect_block import inspect_block, inspect_many_blocks
 from mev_inspect.provider import get_base_provider

 logger = logging.getLogger(__name__)

@@ -50,12 +50,23 @@ class MEVInspector:
             trace_db_session=self.trace_db_session,
         )

-    async def inspect_many_blocks(self, after_block: int, before_block: int):
+    async def inspect_many_blocks(
+        self,
+        after_block: int,
+        before_block: int,
+        block_batch_size: int = 10,
+    ):
         tasks = []
-        for block_number in range(after_block, before_block):
+        for block_number in range(after_block, before_block, block_batch_size):
+            batch_after_block = block_number
+            batch_before_block = min(block_number + block_batch_size, before_block)
+
             tasks.append(
                 asyncio.ensure_future(
-                    self.safe_inspect_block(block_number=block_number)
+                    self.safe_inspect_many_blocks(
+                        after_block_number=batch_after_block,
+                        before_block_number=batch_before_block,
+                    )
                 )
             )
         logger.info(f"Gathered {len(tasks)} blocks to inspect")

@@ -64,16 +75,22 @@ class MEVInspector:
         except CancelledError:
             logger.info("Requested to exit, cleaning up...")
         except Exception as e:
-            logger.error(f"Existed due to {type(e)}")
+            logger.error(f"Exited due to {type(e)}")
             traceback.print_exc()
+            raise

-    async def safe_inspect_block(self, block_number: int):
+    async def safe_inspect_many_blocks(
+        self,
+        after_block_number: int,
+        before_block_number: int,
+    ):
         async with self.max_concurrency:
-            return await inspect_block(
+            return await inspect_many_blocks(
                 self.inspect_db_session,
                 self.base_provider,
                 self.w3,
                 self.trace_classifier,
-                block_number,
+                after_block_number,
+                before_block_number,
                 trace_db_session=self.trace_db_session,
             )
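The loop above slices [after_block, before_block) into half-open batches of block_batch_size and schedules one safe_inspect_many_blocks task per batch. The slicing on its own, as a standalone sketch (batch_ranges is a hypothetical helper written here for illustration, not part of the diff):

def batch_ranges(after_block: int, before_block: int, batch_size: int = 10):
    """Yield (batch_after, batch_before) pairs covering [after_block, before_block)."""
    for start in range(after_block, before_block, batch_size):
        yield start, min(start + batch_size, before_block)


assert list(batch_ranges(100, 125, 10)) == [(100, 110), (110, 120), (120, 125)]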
@@ -14,3 +14,4 @@ class ArbitrageModel(Base):
     start_amount = Column(Numeric, nullable=False)
     end_amount = Column(Numeric, nullable=False)
     profit_amount = Column(Numeric, nullable=False)
+    error = Column(String, nullable=True)
@@ -16,3 +16,4 @@ class LiquidationModel(Base):
     transaction_hash = Column(String, primary_key=True)
     trace_address = Column(ARRAY(Integer), primary_key=True)
     block_number = Column(Numeric, nullable=False)
+    error = Column(String, nullable=True)
@@ -7,7 +7,7 @@ from mev_inspect.schemas.prices import (
     LINK_TOKEN_ADDRESS,
     REN_TOKEN_ADDRESS,
     UNI_TOKEN_ADDRESS,
-    USDC_TOKEN_ADDRESS_ADDRESS,
+    USDC_TOKEN_ADDRESS,
     WBTC_TOKEN_ADDRESS,
     YEARN_TOKEN_ADDRESS,
     Price,

@@ -19,7 +19,7 @@ SUPPORTED_TOKENS = [
     ETH_TOKEN_ADDRESS,
     LINK_TOKEN_ADDRESS,
     AAVE_TOKEN_ADDRESS,
-    USDC_TOKEN_ADDRESS_ADDRESS,
+    USDC_TOKEN_ADDRESS,
     REN_TOKEN_ADDRESS,
     WBTC_TOKEN_ADDRESS,
     YEARN_TOKEN_ADDRESS,
@@ -1,4 +1,4 @@
-from typing import List
+from typing import List, Optional

 from pydantic import BaseModel

@@ -14,3 +14,4 @@ class Arbitrage(BaseModel):
     start_amount: int
     end_amount: int
     profit_amount: int
+    error: Optional[str]
@@ -16,3 +16,4 @@ class Liquidation(BaseModel):
     transaction_hash: str
     trace_address: List[int]
     block_number: str
+    error: Optional[str]
@@ -7,8 +7,11 @@ LINK_TOKEN_ADDRESS = "0x514910771af9ca656af840dff83e8264ecf986ca"
 YEARN_TOKEN_ADDRESS = "0x0bc529c00c6401aef6d220be8c6ea1667f6ad93e"
 AAVE_TOKEN_ADDRESS = "0x7fc66500c84a76ad7e9c93437bfc5ac33e2ddae9"
 UNI_TOKEN_ADDRESS = "0x1f9840a85d5af5bf1d1762f925bdaddc4201f984"
-USDC_TOKEN_ADDRESS_ADDRESS = "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"
+USDC_TOKEN_ADDRESS = "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"
+DAI_TOKEN_ADDRESS = "0x6b175474e89094c44da98b954eedeac495271d0f"
 REN_TOKEN_ADDRESS = "0x408e41876cccdc0f92210600ef50372656052a38"
+CUSDC_TOKEN_ADDRESS = "0x39aa39c021dfbae8fac545936693ac917d5e7563"
+CDAI_TOKEN_ADDRESS = "0x5d3a536e4d6dbd6114cc1ead35777bab948e3643"


 class Price(BaseModel):
tests/blocks/11473321.json (new file)
File diff suppressed because one or more lines are too long
@@ -57,3 +57,23 @@ def test_arbitrage_real_block(trace_classifier: TraceClassifier):
         == "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"
     )
     assert arbitrage_2.profit_amount == 53560707941943273628
+
+
+def test_reverting_arbitrage(trace_classifier: TraceClassifier):
+    block = load_test_block(11473321)
+    classified_traces = trace_classifier.classify(block.traces)
+
+    swaps = get_swaps(classified_traces)
+    assert len(swaps) == 38
+
+    arbitrages = get_arbitrages(list(swaps))
+    assert len(arbitrages) == 4
+
+    arbitrage_1 = [
+        arb
+        for arb in arbitrages
+        if arb.transaction_hash
+        == "0x565146ec57af69208b4a37e3a138ab85c6a6ff358fffb0077824a7378a67c4d6"
+    ][0]
+
+    assert arbitrage_1.error == "Reverted"
@@ -1,6 +1,6 @@
-from typing import List
+from typing import List, Tuple

-from mev_inspect.arbitrages import _get_all_routes, get_arbitrages
+from mev_inspect.arbitrages import _get_shortest_route, get_arbitrages
 from mev_inspect.classifiers.specs.uniswap import (
     UNISWAP_V2_PAIR_ABI_NAME,
     UNISWAP_V3_POOL_ABI_NAME,

@@ -171,39 +171,41 @@ def test_three_pool_arbitrage(get_transaction_hashes, get_addresses):
     assert arbitrage.profit_amount == first_token_out_amount - first_token_in_amount


-def test_get_all_routes():
+def test_get_shortest_route():
     # A -> B, B -> A
     start_swap = create_generic_swap("0xa", "0xb")
     end_swap = create_generic_swap("0xb", "0xa")
-    routes = _get_all_routes(start_swap, end_swap, [])
-    assert len(routes) == 1
+    shortest_route = _get_shortest_route(start_swap, [end_swap], [])
+    assert shortest_route is not None
+    assert len(shortest_route) == 2

     # A->B, B->C, C->A
     start_swap = create_generic_swap("0xa", "0xb")
     other_swaps = [create_generic_swap("0xb", "0xc")]
     end_swap = create_generic_swap("0xc", "0xa")
-    routes = _get_all_routes(start_swap, end_swap, other_swaps)
-    assert len(routes) == 1
+    shortest_route = _get_shortest_route(start_swap, [end_swap], other_swaps)
+    assert shortest_route is not None
+    assert len(shortest_route) == 3

     # A->B, B->C, C->A + A->D
     other_swaps.append(create_generic_swap("0xa", "0xd"))
-    routes = _get_all_routes(start_swap, end_swap, other_swaps)
-    assert len(routes) == 1
+    shortest_route = _get_shortest_route(start_swap, [end_swap], other_swaps)
+    assert shortest_route is not None
+    assert len(shortest_route) == 3

     # A->B, B->C, C->A + A->D B->E
     other_swaps.append(create_generic_swap("0xb", "0xe"))
-    routes = _get_all_routes(start_swap, end_swap, other_swaps)
-    assert len(routes) == 1
+    shortest_route = _get_shortest_route(start_swap, [end_swap], other_swaps)
+    assert shortest_route is not None
+    assert len(shortest_route) == 3

     # A->B, B->A, B->C, C->A
     other_swaps = [create_generic_swap("0xb", "0xa"), create_generic_swap("0xb", "0xc")]
-    routes = _get_all_routes(start_swap, end_swap, other_swaps)
-    assert len(routes) == 1
-    expect_simple_route = [["0xa", "0xb"], ["0xb", "0xc"], ["0xc", "0xa"]]
-    assert len(routes[0]) == len(expect_simple_route)
-    for i in range(len(expect_simple_route)):
-        assert expect_simple_route[i][0] == routes[0][i].token_in_address
-        assert expect_simple_route[i][1] == routes[0][i].token_out_address
+    actual_shortest_route = _get_shortest_route(start_swap, [end_swap], other_swaps)
+    expected_shortest_route = [("0xa", "0xb"), ("0xb", "0xc"), ("0xc", "0xa")]
+    assert actual_shortest_route is not None
+    _assert_route_tokens_equal(actual_shortest_route, expected_shortest_route)

     # A->B, B->C, C->D, D->A, B->D
     end_swap = create_generic_swap("0xd", "0xa")

@@ -212,8 +214,24 @@ def test_get_all_routes():
         create_generic_swap("0xc", "0xd"),
         create_generic_swap("0xb", "0xd"),
     ]
-    routes = _get_all_routes(start_swap, end_swap, other_swaps)
-    assert len(routes) == 2
+    expected_shortest_route = [("0xa", "0xb"), ("0xb", "0xd"), ("0xd", "0xa")]
+    actual_shortest_route = _get_shortest_route(start_swap, [end_swap], other_swaps)
+
+    assert actual_shortest_route is not None
+    _assert_route_tokens_equal(actual_shortest_route, expected_shortest_route)
+
+
+def _assert_route_tokens_equal(
+    route: List[Swap],
+    expected_token_in_out_pairs: List[Tuple[str, str]],
+) -> None:
+    assert len(route) == len(expected_token_in_out_pairs)
+
+    for i, [expected_token_in, expected_token_out] in enumerate(
+        expected_token_in_out_pairs
+    ):
+        assert expected_token_in == route[i].token_in_address
+        assert expected_token_out == route[i].token_out_address


 def create_generic_swap(