Compare commits
4 Commits
main ... fix_sandwi
| Author | SHA1 | Date |
|--------|------|------|
|        | 458ad7f7f5 | |
|        | 27199ac65b | |
|        | deb6853828 | |
|        | c8da3e34ae | |
.github/workflows/github-actions.yml (2 changes, vendored)

@@ -21,7 +21,7 @@ jobs:
       - name: Bootstrap poetry
         shell: bash
         run: |
-          curl -sSL https://install.python-poetry.org \
+          curl -sL https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py \
             | python - -y
 
       - name: Update PATH
.gitignore (3 changes, vendored)

@@ -25,6 +25,3 @@ cache
 
 # pycharm
 .idea
-
-.env
-.python-version
LICENSE (21 changes)

@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2023 Flashbots
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
MONOLITHIC.md (126 changes)

@@ -1,126 +0,0 @@
-
-# Running mev-inspect-py without kubernetes ('monolithic mode')
-
-Running mev-inspect-py outside of kubernetes can be useful for debug purposes. In this case, the steps for installation are:
-1. Install dependencies (pyenv, poetry, postgres)
-1. Set up python virtual environment using matching python version (3.9.x) and install required python modules using poetry
-1. Create postgres database
-1. Run database migrations
-
-The database credentials and archive node address used by mev-inspect-py need to be loaded into environment variables (both for database migrations and to run mev-inspect-py).
-
-## Ubuntu install instructions
-
-Starting from a clean Ubuntu 22.04 installation, the prerequisites for pyenv and psycopg2 (python3-dev, libpq-dev) can be installed with
-
-`sudo apt install -y make build-essential git libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev liblzma-dev python3-dev libpq-dev`
-
-### pyenv
-Install pyenv using the web installer
-
-`curl https://pyenv.run | bash`
-
-and add the following to `~/.bashrc` (if running locally) or `~/.profile` (if running over ssh).
-
-```
-export PYENV_ROOT="$HOME/.pyenv"
-command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"
-eval "$(pyenv init -)"
-```
-
-Then update the current shell by running `source ~/.bashrc` or `source ~/.profile` as appropriate.
-
-### Poetry
-
-Install Poetry using the web installer
-
-`curl -sSL https://install.python-poetry.org | python3 -`
-
-and add the following to `~/.bashrc` (if running locally) or `~/.profile` (if running over ssh):
-
-`export PATH="/home/user/.local/bin:$PATH"`
-
-If running over ssh you should also add the following to `~/.profile` to prevent [Poetry errors](https://github.com/python-poetry/poetry/issues/1917) from a lack of active keyring:
-
-`export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring`
-
-Again update the current shell by running `source ~/.bashrc` or `source ~/.profile` as appropriate.
-
-### postgres
-We have tested two alternatives for postgres: installing it locally or running it as a container.
-
-#### Option 1: Installing locally
-
-To install locally from a clean Ubuntu 22.04 installation, run:
-`sudo apt install postgresql postgresql-contrib`
-
-Note: You may need to reconfigure your pg_hba.conf to allow local access.
-
-#### Option 2: Installing docker
-
-To avoid interfering with your local postgres instance, you may prefer to run postgres within a docker container.
-For docker installation instructions, please refer to https://docs.docker.com/engine/install/ubuntu/
-
-### mev-inspect-py
-
-With all dependencies now installed, clone the mev-inspect-py repo
-```
-git clone https://github.com/flashbots/mev-inspect-py.git
-cd mev-inspect-py
-```
-We now install the required python version and use Poetry to install the required python modules into a virtual environment.
-
-```
-pyenv install 3.9.16
-pyenv local 3.9.16
-poetry env use 3.9.16
-poetry install
-```
-
-### Create database
-mev-inspect-py outputs to a postgres database, so we need to set this up. There are various ways of doing this; two options are presented here.
-
-#### Option 1 — Run postgres locally
-```
-sudo -u postgres psql
-\password
-postgres
-create database mev_inspect;
-\q
-```
-
-#### Option 2 — Use postgres docker image
-To avoid interfering with your local postgres instance, you may prefer to run postgres within a docker container. First ensure that postgres is not currently running, so that port `5432` is available:
-`sudo systemctl stop postgresql`
-and then start a containerised postgres instance:
-`sudo docker run -d -p 5432:5432 -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=mev_inspect postgres`
-
-### Environment variables
-We will need to set a few environment variables to use mev-inspect-py. **These will be required every time mev-inspect-py runs**, so again you may wish to add these to your `~/.bashrc` and/or `~/.profile` as appropriate. Note that you need to substitute the correct URL for your archive node below if you are not running Erigon locally.
-```
-export POSTGRES_USER=postgres
-export POSTGRES_PASSWORD=postgres
-export POSTGRES_HOST=localhost
-export RPC_URL="http://127.0.0.1:8545"
-```
-### Database migrations
-Finally run the database migrations and fetch price information:
-
-```
-poetry run alembic upgrade head
-poetry run fetch-all-prices
-```
-
-## Usage instructions
-The same functionality available through kubernetes can be run in 'monolithic mode', but the relevant functions now need to be invoked by Poetry directly. So to inspect a single block, run for example:
-
-`poetry run inspect-block 16379706`
-
-Or to inspect a range of blocks:
-
-`poetry run inspect-many-blocks 16379606 16379706`
-
-Or to run the test suite:
-
-`poetry run pytest tests`
-
@@ -1,5 +1,3 @@
-⚠️ This tool has been deprecated. You can visit [Flashbots Data](https://datasets.flashbots.net/) for historical mev-inspect data on Ethereum and join us on the [Flashbots forum](https://collective.flashbots.net). ⚠️
-
 # mev-inspect-py
 
 [](https://github.com/RichardLitt/standard-readme)
@@ -39,7 +37,7 @@ Set an environment variable `RPC_URL` to an RPC for fetching blocks.
 
 mev-inspect-py currently requires a node with support for Erigon traces and receipts (not geth yet 😔).
 
-[pokt.network](https://www.pokt.network/)'s "Ethereum Mainnet Archival with trace calls" is a good hosted option.
+[pokt.network](pokt.network)'s "Ethereum Mainnet Archival with trace calls" is a good hosted option.
 
 Example:
 
@@ -68,10 +66,6 @@ And load prices data
 ./mev prices fetch-all
 ```
 
-## Monolithic (non-kubernetes) install instructions
-
-For an alternative means of running mev-inspect-py for smaller set-ups or debug purposes see the [monolithic install instructions](MONOLITHIC.md).
-
 ## Usage
 
 ### Inspect a single block
@@ -163,8 +163,6 @@ def _get_all_start_end_swaps(swaps: List[Swap]) -> List[Tuple[Swap, List[Swap]]]
             if (
                 potential_start_swap.token_in_address
                 == potential_end_swap.token_out_address
-                and potential_start_swap.contract_address
-                != potential_end_swap.contract_address
                 and potential_start_swap.from_address == potential_end_swap.to_address
                 and not potential_start_swap.from_address in pool_addrs
             ):
@@ -34,7 +34,8 @@ async def create_from_block_number(
         _find_or_fetch_block_traces(w3, block_number, trace_db_session),
         _find_or_fetch_base_fee_per_gas(w3, block_number, trace_db_session),
     )
-    miner_address = await _find_or_fetch_miner_address(w3, block_number, traces)
+
+    miner_address = _get_miner_address_from_traces(traces)
 
     return Block(
         block_number=block_number,
@@ -179,27 +180,11 @@ def _find_base_fee_per_gas(
     return base_fee
 
 
-async def _find_or_fetch_miner_address(
-    w3,
-    block_number: int,
-    traces: List[Trace],
-) -> Optional[str]:
-    # eth1 blocks
-    miner_address = _get_miner_address_from_traces(traces)
-    if miner_address is not None:
-        return miner_address
-    return await _fetch_miner_eth2(w3, block_number)
-
-
-async def _fetch_miner_eth2(w3, block_number: int) -> Optional[str]:
-    block_json = await w3.eth.get_block(block_number)
-    return block_json["miner"]
-
-
 def _get_miner_address_from_traces(traces: List[Trace]) -> Optional[str]:
     for trace in traces:
         if trace.type == TraceType.reward:
             return trace.action["author"]
 
     return None
@@ -94,9 +94,6 @@ def create_swap_from_pool_transfers(
     transfer_in = transfers_to_pool[-1]
     transfer_out = transfers_from_pool_to_recipient[0]
 
-    if transfer_in.token_address == transfer_out.token_address:
-        return None
-
     return Swap(
         abi_name=trace.abi_name,
         transaction_hash=trace.transaction_hash,
@@ -30,9 +30,6 @@ def get_liquidations(classified_traces: List[ClassifiedTrace]) -> List[Liquidati
         if _is_child_liquidation(trace, parent_liquidations):
             continue
 
-        if trace.error == "Reverted":
-            continue
-
         if trace.classification == Classification.liquidate:
 
             parent_liquidations.append(trace)
@@ -74,10 +74,7 @@ def _get_punk_bid_acceptances_for_transaction(
         if not isinstance(trace, DecodedCallTrace):
             continue
 
-        elif (
-            trace.classification == Classification.punk_accept_bid
-            and trace.error is None
-        ):
+        elif trace.classification == Classification.punk_accept_bid:
             punk_accept_bid = PunkBidAcceptance(
                 block_number=trace.block_number,
                 transaction_hash=trace.transaction_hash,
@@ -42,32 +42,57 @@ def _get_sandwich_starting_with_swap(
     ]:
         return None
 
-    for other_swap in rest_swaps:
-        if other_swap.transaction_hash == front_swap.transaction_hash:
+    for back_swap in rest_swaps:
+        if back_swap.transaction_hash == front_swap.transaction_hash:
             continue
 
-        if other_swap.contract_address == front_swap.contract_address:
+        if back_swap.contract_address == front_swap.contract_address:
             if (
-                other_swap.token_in_address == front_swap.token_in_address
-                and other_swap.token_out_address == front_swap.token_out_address
-                and other_swap.from_address != sandwicher_address
+                back_swap.token_in_address == front_swap.token_in_address
+                and back_swap.token_out_address == front_swap.token_out_address
+                and back_swap.from_address != sandwicher_address
             ):
-                sandwiched_swaps.append(other_swap)
+                sandwiched_swaps.append(back_swap)
             elif (
-                other_swap.token_out_address == front_swap.token_in_address
-                and other_swap.token_in_address == front_swap.token_out_address
-                and other_swap.from_address == sandwicher_address
+                back_swap.token_out_address == front_swap.token_in_address
+                and back_swap.token_in_address == front_swap.token_out_address
+                and back_swap.from_address == sandwicher_address
             ):
                 if len(sandwiched_swaps) > 0:
+                    profit_amount: float
+                    if back_swap.token_in_amount == 0 and back_swap.error is None:
+                        raise ValueError("Backrun cannot swap 0 tokens")
+                    if back_swap.token_in_amount == front_swap.token_out_amount:
+                        profit_amount = (
+                            back_swap.token_out_amount - front_swap.token_in_amount
+                        )
+
+                    if back_swap.token_in_amount > front_swap.token_out_amount:
+                        exchange_rate = (
+                            front_swap.token_out_amount / back_swap.token_in_amount
+                        )
+                        profit_amount = (
+                            exchange_rate * back_swap.token_out_amount
+                            - front_swap.token_in_amount
+                        )
+
+                    if back_swap.token_in_amount < front_swap.token_out_amount:
+                        exchange_rate = (
+                            back_swap.token_in_amount / front_swap.token_out_amount
+                        )
+                        profit_amount = (
+                            back_swap.token_out_amount
+                            - exchange_rate * front_swap.token_in_amount
+                        )
+
                     return Sandwich(
                         block_number=front_swap.block_number,
                         sandwicher_address=sandwicher_address,
                         frontrun_swap=front_swap,
-                        backrun_swap=other_swap,
+                        backrun_swap=back_swap,
                         sandwiched_swaps=sandwiched_swaps,
                         profit_token_address=front_swap.token_in_address,
-                        profit_amount=other_swap.token_out_amount
-                        - front_swap.token_in_amount,
+                        profit_amount=profit_amount,
                     )
 
     return None
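For intuition, the profit logic added in this hunk compares the backrun's input amount with the frontrun's output amount, scaling by an exchange rate when the two legs are unbalanced. The sketch below restates that arithmetic outside the diff on made-up token amounts; the function name and numbers are illustrative only and are not part of mev-inspect-py.

```
# Illustrative restatement of the profit arithmetic added above, using
# hypothetical token amounts; not part of the mev-inspect-py codebase.
def sandwich_profit(front_in: int, front_out: int, back_in: int, back_out: int) -> float:
    if back_in == front_out:
        # Balanced: the backrun sells exactly what the frontrun bought.
        return back_out - front_in
    if back_in > front_out:
        # Back-heavy: scale the backrun's output down to the frontrun's size.
        exchange_rate = front_out / back_in
        return exchange_rate * back_out - front_in
    # Front-heavy: scale the frontrun's input down to the backrun's size.
    exchange_rate = back_in / front_out
    return back_out - exchange_rate * front_in


# Balanced sandwich: spend 1_000, receive 100, sell the same 100 back for 1_050.
print(sandwich_profit(1_000, 100, 100, 1_050))  # 50
# Back-heavy: the backrun sells 200 and returns 2_100, scaled to the frontrun's 100.
print(sandwich_profit(1_000, 100, 200, 2_100))  # 50.0
```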
@@ -13,7 +13,7 @@ class CallResult(CamelModel):
     gas_used: int
 
     @validator("gas_used", pre=True)
-    def maybe_hex_to_int(cls, v):
+    def maybe_hex_to_int(v):
         if isinstance(v, str):
             return hex_to_int(v)
         return v
@@ -27,7 +27,7 @@ class CallAction(Web3Model):
     gas: int
 
     @validator("value", "gas", pre=True)
-    def maybe_hex_to_int(cls, v):
+    def maybe_hex_to_int(v):
         if isinstance(v, str):
             return hex_to_int(v)
         return v
@@ -24,7 +24,7 @@ class Receipt(CamelModel):
         "cumulative_gas_used",
         pre=True,
     )
-    def maybe_hex_to_int(cls, v):
+    def maybe_hex_to_int(v):
         if isinstance(v, str):
             return hex_to_int(v)
         return v
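All three hunks above touch the same pattern: a pydantic v1 `pre=True` validator that normalises hex-encoded JSON-RPC quantities to ints before field validation runs. A minimal self-contained sketch of that pattern follows; the model and field are invented for illustration and are not part of mev-inspect-py.

```
# Minimal sketch of the hex-to-int pre-validator pattern (pydantic v1 API).
# ExampleReceipt and its field are hypothetical, used only for illustration.
from pydantic import BaseModel, validator


def hex_to_int(value: str) -> int:
    return int(value, 16)


class ExampleReceipt(BaseModel):
    gas_used: int

    @validator("gas_used", pre=True)
    def maybe_hex_to_int(cls, v):
        # JSON-RPC encodes quantities as hex strings like "0x5208".
        if isinstance(v, str):
            return hex_to_int(v)
        return v


print(ExampleReceipt(gas_used="0x5208").gas_used)  # 21000
print(ExampleReceipt(gas_used=21000).gas_used)     # 21000
```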
poetry.lock (2654 changes, generated)

File diff suppressed because it is too large
@@ -10,13 +10,12 @@ web3 = "^5.23.0"
 pydantic = "^1.8.2"
 hexbytes = "^0.2.1"
 click = "^8.0.1"
-psycopg2-binary = "^2.9.7"
+psycopg2 = "^2.9.1"
 aiohttp = "^3.8.0"
 dramatiq = {extras = ["redis"], version = "^1.12.1"}
 pycoingecko = "^2.2.0"
 boto3 = "^1.20.48"
 aiohttp-retry = "^2.4.6"
-pyyaml = "^6.0.1"
 
 [tool.poetry.dev-dependencies]
 pre-commit = "^2.13.0"
@@ -31,7 +30,6 @@ alembic = "^1.6.5"
 CProfileV = "^1.0.7"
 regex = "^2021.10.8"
 pytest-profiling = "^1.7.0"
-sqlalchemy = "^1.4.23"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]
@@ -84,6 +82,3 @@ filter_files = true
 known_first_party = "mev_inspect"
 known_third_party = "alembic"
 py_version=39
-
-[pytest]
-asyncio_mode = "auto"
File diff suppressed because one or more lines are too long

tests/blocks/13699765.json (1 change, new file)
File diff suppressed because one or more lines are too long

tests/blocks/14659109.json (1 change, new file)
File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long
@@ -1,65 +0,0 @@
-from unittest.mock import MagicMock, patch
-
-import pytest
-
-from mev_inspect.block import _find_or_fetch_miner_address
-from tests.utils import load_test_block
-
-
-@pytest.fixture
-def mocked_web3():
-    with patch("mev_inspect.block.Web3") as mock_web3:
-        yield mock_web3
-
-
-@pytest.mark.asyncio
-# pylint: disable=redefined-outer-name
-async def test_eth1_block_miner(mocked_web3):
-    # Create a mock Web3 instance
-    mock_web3_instance = mocked_web3.return_value
-
-    # Set up the mock for web3.eth.get_block
-    mock_eth = mock_web3_instance.eth
-    mock_eth.get_block.return_value = {
-        "miner": "0x4a536c1f6a5d5a9c1aeca9f6d04fbbf5f0d8f4e3"
-    }
-
-    # Load a sample block and remove the miner
-    block_number = 10921991
-    block = load_test_block(block_number)
-    block.miner = None
-
-    # Test that the miner is fetched
-    miner_address = await _find_or_fetch_miner_address(
-        w3=mock_web3_instance, traces=block.traces, block_number=block_number
-    )  # Use 'await'
-
-    # this is within the traces object
-    assert miner_address == "0x52bc44d5378309ee2abf1539bf71de1b7d7be3b5"
-
-
-@pytest.mark.asyncio
-# pylint: disable=redefined-outer-name
-async def test_eth2_block_miner(mocked_web3):
-    # Create a mock Web3 instance
-    mock_web3_instance = mocked_web3.return_value
-
-    # Create a coroutine function to mock w3.eth.get_block
-    # pylint: disable=unused-argument
-    async def mock_get_block(block_number):
-        return {"miner": "0x4a536c1f6a5d5a9c1aeca9f6d04fbbf5f0d8f4e3"}
-
-    # Mock w3.eth.get_block with the coroutine function
-    mock_web3_instance.eth.get_block = MagicMock(side_effect=mock_get_block)
-
-    # Load a sample block and remove the miner
-    block_number = 10921990
-    block = load_test_block(block_number)
-    block.miner = None
-
-    # Test that the miner is fetched
-    miner_address = await _find_or_fetch_miner_address(
-        w3=mock_web3_instance, traces=block.traces, block_number=block_number
-    )  # Use 'await'
-
-    assert miner_address == "0x4a536c1f6a5d5a9c1aeca9f6d04fbbf5f0d8f4e3"
@@ -115,18 +115,3 @@ def test_c_token_liquidation(trace_classifier: TraceClassifier):
 
     for liquidation in liquidations:
         assert liquidation in result
-
-
-def test_reverted_liquidation(trace_classifier: TraceClassifier):
-    block_number = 15049646
-    transaction_hash = (
-        "0x6dd0d8be8a77651f64ef399b47fbc87011bd796b43349c3164ff7da965e0b345"
-    )
-
-    block = load_test_block(block_number)
-    classified_traces = trace_classifier.classify(block.traces)
-    result = get_liquidations(classified_traces)
-
-    assert transaction_hash not in [
-        liquidation.transaction_hash for liquidation in result
-    ]
tests/test_heavy_sandwiches.py (42 changes, new file)

@@ -0,0 +1,42 @@
+from typing import List
+
+from mev_inspect.classifiers.trace import TraceClassifier
+from mev_inspect.sandwiches import get_sandwiches
+from mev_inspect.schemas.sandwiches import Sandwich
+from mev_inspect.schemas.swaps import Swap
+from mev_inspect.swaps import get_swaps
+from tests.utils import load_test_block
+
+
+def test_back_heavy_sandwich_profits(trace_classifier: TraceClassifier):
+    block_number = 13699765
+    expected_sandwicher = "0x51399b32cd0186bb32230e24167489f3b2f47870"
+    expected_token_address = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"
+    expected_profit_amount = -435805264121298944
+
+    block = load_test_block(block_number)
+    classified_traces = trace_classifier.classify(block.traces)
+    swaps: List[Swap] = get_swaps(classified_traces)
+    result: List[Sandwich] = get_sandwiches(swaps)
+
+    for observed_sandwich in result:
+        if observed_sandwich.sandwicher_address == expected_sandwicher:
+            assert expected_token_address == observed_sandwich.profit_token_address
+            assert expected_profit_amount == observed_sandwich.profit_amount
+
+
+def test_front_heavy_sandwich_profits(trace_classifier: TraceClassifier):
+    block_number = 14659109
+    expected_sandwicher = "0x01ff6318440f7d5553a82294d78262d5f5084eff"
+    expected_token_address = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"
+    expected_profit_amount = -180511102573164864
+
+    block = load_test_block(block_number)
+    classified_traces = trace_classifier.classify(block.traces)
+    swaps: List[Swap] = get_swaps(classified_traces)
+    result: List[Sandwich] = get_sandwiches(swaps)
+
+    for observed_sandwich in result:
+        if observed_sandwich.sandwicher_address == expected_sandwicher:
+            assert expected_token_address == observed_sandwich.profit_token_address
+            assert expected_profit_amount == observed_sandwich.profit_amount
@@ -2,6 +2,8 @@ import json
 import os
 from typing import Dict, List
 
+from pydantic import parse_file_as
+
 from mev_inspect.schemas.blocks import Block
 from mev_inspect.schemas.sandwiches import Sandwich
 
@@ -12,10 +14,7 @@ TEST_SANDWICHES_DIRECTORY = os.path.join(THIS_FILE_DIRECTORY, "sandwiches")
 
 def load_test_sandwiches(block_number: int) -> List[Sandwich]:
     sandwiches_path = f"{TEST_SANDWICHES_DIRECTORY}/{block_number}.json"
-
-    with open(sandwiches_path, "r") as file:
-        sandwiches_data = json.load(file)
-    return [Sandwich(**sandwich) for sandwich in sandwiches_data]
+    return parse_file_as(List[Sandwich], sandwiches_path)
 
 
 def load_test_block(block_number: int) -> Block:
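For reference, `parse_file_as` (a pydantic v1 helper) reads a JSON file and validates it into the requested type in one call, which is what the hunk above swaps in for the manual `json.load` loop. A small sketch under that assumption, with an invented model and fixture path:

```
# Small sketch of pydantic v1's parse_file_as; the model and path below are
# hypothetical and stand in for the Sandwich fixtures used by the tests.
from typing import List

from pydantic import BaseModel, parse_file_as


class ExampleSwap(BaseModel):
    transaction_hash: str
    token_in_amount: int
    token_out_amount: int


def load_example_swaps(path: str) -> List[ExampleSwap]:
    # Roughly equivalent to: [ExampleSwap(**item) for item in json.load(open(path))]
    return parse_file_as(List[ExampleSwap], path)


# Usage (with a hypothetical fixture file):
# swaps = load_example_swaps("tests/example_swaps.json")
```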