Compare commits

5 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 8e68e251b5 | |
| | ae2e5464c3 | |
| | 813e2034d4 | |
| | 308902e62c | |
| | f32e62ae55 | |
**`.github/workflows/github-actions.yml`** (vendored) — 2 changed lines

````diff
@@ -21,7 +21,7 @@ jobs:
       - name: Bootstrap poetry
         shell: bash
         run: |
-          curl -sSL https://install.python-poetry.org \
+          curl -sL https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py \
             | python - -y

       - name: Update PATH
````
**`.gitignore`** (vendored) — 3 changed lines

````diff
@@ -25,6 +25,3 @@ cache

 # pycharm
 .idea
-
-.env
-.python-version
````
**`LICENSE`** — 21 changed lines (file removed)

````diff
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2023 Flashbots
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
````
**`MONOLITHIC.md`** — 126 changed lines (file removed)

````diff
@@ -1,126 +0,0 @@
-# Running mev-inspect-py without kubernetes ('monolithic mode')
-
-Running mev-inspect-py outside of kubernetes can be useful for debug purposes. In this case, the steps for installation are:
-1. Install dependencies (pyenv, poetry, postgres)
-1. Set up python virtual environment using matching python version (3.9.x) and install required python modules using poetry
-1. Create postgres database
-1. Run database migrations
-
-The database credentials and archive node address used by mev-inspect-py need to be loaded into environment variables (both for database migrations and to run mev-inspect-py).
-
-## Ubuntu install instructions
-
-Starting from a clean Ubuntu 22.04 installation, the prerequisites for pyenv and psycopg2 (python3-dev, libpq-dev) can be installed with
-
-`sudo apt install -y make build-essential git libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev liblzma-dev python3-dev libpq-dev`
-
-### pyenv
-Install pyenv using the web installer
-
-`curl https://pyenv.run | bash`
-
-and add the following to `~/.bashrc` (if running locally) or `~/.profile` (if running over ssh).
-
-```
-export PYENV_ROOT="$HOME/.pyenv"
-command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"
-eval "$(pyenv init -)"
-```
-
-Then update the current shell by running `source ~/.bashrc` or `source ~/.profile` as appropriate.
-
-### Poetry
-
-Install Poetry using the web installer
-
-`curl -sSL https://install.python-poetry.org | python3 -`
-
-and add the following to `~/.bashrc` (if running locally) or `~/.profile` (if running over ssh)
-
-`export PATH="/home/user/.local/bin:$PATH"`
-
-If running over ssh you should also add the following to `~/.profile` to prevent [Poetry errors](https://github.com/python-poetry/poetry/issues/1917) from a lack of active keyring:
-
-`export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring`
-
-Again, update the current shell by running `source ~/.bashrc` or `source ~/.profile` as appropriate.
-
-### postgres
-We have tested two alternatives for postgres: installing locally or as a container.
-
-#### Option 1: Installing locally
-
-To install locally from a clean Ubuntu 22.04 installation, run:
-`sudo apt install postgresql postgresql-contrib`
-
-Note: You may need to reconfigure your pg_hba.conf to allow local access.
-
-#### Option 2: Installing docker
-
-To avoid interfering with your local postgres instance, you may prefer to run postgres within a docker container.
-For docker installation instructions, please refer to https://docs.docker.com/engine/install/ubuntu/
-
-### mev-inspect-py
-
-With all dependencies now installed, clone the mev-inspect-py repo
-```
-git clone https://github.com/flashbots/mev-inspect-py.git
-cd mev-inspect-py
-```
-We now install the required python version and use Poetry to install the required python modules into a virtual environment.
-
-```
-pyenv install 3.9.16
-pyenv local 3.9.16
-poetry env use 3.9.16
-poetry install
-```
-
-### Create database
-mev-inspect-py outputs to a postgres database, so we need to set this up. There are various ways of doing this; two options are presented here.
-
-#### Option 1 — Run postgres locally
-```
-sudo -u postgres psql
-\password
-postgres
-create database mev_inspect;
-\q
-```
-
-#### Option 2 — Use postgres docker image
-To avoid interfering with your local postgres instance, you may prefer to run postgres within a docker container. First ensure that postgres is not currently running so that port `5432` is available:
-`sudo systemctl stop postgresql`
-and then start a containerised postgres instance:
-`sudo docker run -d -p 5432:5432 -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=mev_inspect postgres`
-
-### Environment variables
-We will need to set a few environment variables to use mev-inspect-py. **These will be required every time mev-inspect-py runs**, so you may wish to add these to your `~/.bashrc` and/or `~/.profile` as appropriate. Note that you need to substitute the correct URL for your archive node below if you are not running Erigon locally.
-```
-export POSTGRES_USER=postgres
-export POSTGRES_PASSWORD=postgres
-export POSTGRES_HOST=localhost
-export RPC_URL="http://127.0.0.1:8545"
-```
-### Database migrations
-Finally, run the database migrations and fetch price information:
-
-```
-poetry run alembic upgrade head
-poetry run fetch-all-prices
-```
-
-## Usage instructions
-The same functionality available through kubernetes can be run in 'monolithic mode', but the relevant functions now need to be invoked by Poetry directly. To inspect a single block, run for example:
-
-`poetry run inspect-block 16379706`
-
-Or to inspect a range of blocks:
-
-`poetry run inspect-many-blocks 16379606 16379706`
-
-Or to run the test suite:
-
-`poetry run pytest tests`
````
**`README.md`**

````diff
@@ -1,5 +1,3 @@
-⚠️ This tool has been deprecated. You can visit [Flashbots Data](https://datasets.flashbots.net/) for historical mev-inspect data on Ethereum and join us on the [Flashbots forum](https://collective.flashbots.net). ⚠️
-
 # mev-inspect-py

 [](https://github.com/RichardLitt/standard-readme)
@@ -39,7 +37,7 @@ Set an environment variable `RPC_URL` to an RPC for fetching blocks.

 mev-inspect-py currently requires a node with support for Erigon traces and receipts (not geth yet 😔).

-[pokt.network](https://www.pokt.network/)'s "Ethereum Mainnet Archival with trace calls" is a good hosted option.
+[pokt.network](pokt.network)'s "Ethereum Mainnet Archival with trace calls" is a good hosted option.

 Example:

@@ -68,10 +66,6 @@ And load prices data
 ./mev prices fetch-all
 ```

-## Monolithic (non-kubernetes) install instructions
-
-For an alternative means of running mev-inspect-py for smaller set-ups or debug purposes see the [monolithic install instructions](MONOLITHIC.md).
-
 ## Usage

 ### Inspect a single block
````
**`Tiltfile`** — 10 changed lines

````diff
@@ -15,12 +15,14 @@ helm_remote("redis",
 )

 k8s_yaml(configmap_from_dict("mev-inspect-rpc", inputs = {
-    "url" : os.environ["RPC_URL"],
+    "primary_url" : os.environ["RPC_URL"],
+    "secondary_url" : os.environ["SECONDARY_RPC_URL"],
 }))

-k8s_yaml(configmap_from_dict("mev-inspect-listener-healthcheck", inputs = {
-    "url" : os.getenv("LISTENER_HEALTHCHECK_URL", default=""),
-}))
+#k8s_yaml(configmap_from_dict("mev-inspect-listener-healthcheck", inputs = {
+#    "url" : os.getenv("LISTENER_HEALTHCHECK_URL", default=""),
+#}))

 k8s_yaml(secret_from_dict("mev-inspect-db-credentials", inputs = {
     "username" : "postgres",
````
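One thing to note in the hunk above: the new `secondary_url` entry reads `os.environ["SECONDARY_RPC_URL"]`, which would typically make Tiltfile evaluation fail when that variable is not exported, even though the container-side env var added below is marked `optional: true`. A minimal sketch of a more forgiving variant, assuming Tilt's Starlark `os.getenv` with a `default` argument (already used for the healthcheck URL in the removed lines):

```python
# Sketch only (Starlark, Python-like syntax): fall back to an empty string when
# SECONDARY_RPC_URL is unset, mirroring `optional: true` on the Kubernetes side.
k8s_yaml(configmap_from_dict("mev-inspect-rpc", inputs = {
    "primary_url"   : os.environ["RPC_URL"],
    "secondary_url" : os.getenv("SECONDARY_RPC_URL", default=""),
}))
```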
The next hunks come from the Helm deployment templates (their file headers were not captured in this view). The first template picks up the secondary RPC env var:

````diff
@@ -84,7 +84,14 @@ spec:
           valueFrom:
             configMapKeyRef:
               name: mev-inspect-rpc
-              key: url
+              key: primary_url
+              optional: true
+        - name: SECONDARY_RPC_URL
+          valueFrom:
+            configMapKeyRef:
+              name: mev-inspect-rpc
+              key: secondary_url
+              optional: true
         - name: LISTENER_HEALTHCHECK_URL
           valueFrom:
             configMapKeyRef:
````

A further hunk in the same template shows only context lines in this capture (the changed line did not survive):

````diff
@@ -118,7 +125,7 @@ spec:
         {{- range .Values.extraEnv }}
         - name: {{ .name }}
           value: {{ .value }}
         {{- end }}
       {{- with .Values.nodeSelector }}
       nodeSelector:
         {{- toYaml . | nindent 8 }}
````

The second deployment template receives an identical `@@ -84,7 +84,14 @@ spec:` hunk.
**`listener`** — 1 changed line

````diff
@@ -15,7 +15,6 @@ case "$1" in
       --chdir /app \
       --chuid flashbot \
       --start \
-      --quiet \
       --pidfile $PIDFILE \
       --make-pidfile \
       --startas /bin/bash -- -c "poetry run python listener.py"
````
**`listener.py`** — 77 changed lines

````diff
@@ -1,6 +1,8 @@
 import asyncio
 import logging
 import os
+from collections import deque
+from typing import Dict, Optional

 import dramatiq
 from aiohttp_retry import ExponentialRetry, RetryClient
@@ -28,41 +30,35 @@ logger = logging.getLogger(__name__)
 # lag to make sure the blocks we see are settled
 BLOCK_NUMBER_LAG = 5

+primary_rpc = os.getenv("RPC_URL")
+secondary_rpc = os.getenv("SECONDARY_RPC_URL")
+
+if os.getenv("RPC_URL") is None:
+    raise RuntimeError("Missing primary RPC environment variable: RPC_URL. ")
+
+rpc_queue = deque([primary_rpc, secondary_rpc])
+

 @coro
 async def run():
-    rpc = os.getenv("RPC_URL")
-    if rpc is None:
-        raise RuntimeError("Missing environment variable RPC_URL")
-
-    healthcheck_url = os.getenv("LISTENER_HEALTHCHECK_URL")
-
     logger.info("Starting...")

-    killer = GracefulKiller()
+    if _get_inspector_params(rpc_queue[0]) is None and secondary_rpc is not None:
+        rpc_queue.rotate(-1)

-    inspect_db_session = get_inspect_session()
-    trace_db_session = get_trace_session()
-
-    broker = connect_broker()
-    export_actor = dramatiq.actor(
-        realtime_export_task,
-        broker=broker,
-        queue_name=HIGH_PRIORITY_QUEUE,
-        priority=HIGH_PRIORITY,
-    )
-
-    inspector = MEVInspector(rpc)
-    base_provider = get_base_provider(rpc)
+    inspect_params: Optional[Dict] = _get_inspector_params(rpc_queue[0])
+
+    killer = inspect_params["killer"]

     while not killer.kill_now:
         await inspect_next_block(
-            inspector,
-            inspect_db_session,
-            trace_db_session,
-            base_provider,
-            healthcheck_url,
-            export_actor,
+            inspect_params["inspector"],
+            inspect_params["inspect_db_session"],
+            inspect_params["trace_db_session"],
+            inspect_params["base_provider"],
+            inspect_params["healthcheck_url"],
+            inspect_params["export_actor"],
         )

     logger.info("Stopping...")
@@ -119,6 +115,39 @@ async def ping_healthcheck_url(url):
         pass


+def _get_inspector_params(rpc: str) -> Optional[Dict]:
+    try:
+        healthcheck_url = os.getenv("LISTENER_HEALTHCHECK_URL")
+
+        broker = connect_broker()
+        export_actor = dramatiq.actor(
+            realtime_export_task,
+            broker=broker,
+            queue_name=HIGH_PRIORITY_QUEUE,
+            priority=HIGH_PRIORITY,
+        )
+
+        killer = GracefulKiller()
+
+        inspect_db_session = get_inspect_session()
+        trace_db_session = get_trace_session()
+
+        inspector = MEVInspector(rpc)
+        base_provider = get_base_provider(rpc)
+
+        return {
+            "inspector": inspector,
+            "base_provider": base_provider,
+            "killer": killer,
+            "healthcheck_url": healthcheck_url,
+            "inspect_db_session": inspect_db_session,
+            "trace_db_session": trace_db_session,
+            "export_actor": export_actor,
+        }
+    except Exception:
+        return None
+
+
 if __name__ == "__main__":
     try:
         run()
````
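Taken together, the listener changes implement a simple primary/secondary RPC failover: both URLs sit in a `deque`, the queue is rotated when the primary cannot be used to build the inspector parameters, and `_get_inspector_params` turns any setup failure into `None`. Below is a minimal, self-contained sketch of that pattern; the `connect` helper and the URLs are placeholders invented for the example, not part of the real module.

```python
from collections import deque
from typing import Dict, Optional


def connect(rpc_url: str) -> Dict:
    """Placeholder for the real setup work (broker, DB sessions, MEVInspector)."""
    if not rpc_url.startswith("http"):
        raise ValueError(f"unusable RPC url: {rpc_url}")
    return {"rpc": rpc_url}


def get_params(rpc_url: str) -> Optional[Dict]:
    # Mirrors _get_inspector_params: any failure during setup becomes None.
    try:
        return connect(rpc_url)
    except Exception:
        return None


primary = "http://127.0.0.1:8545"    # stand-in for RPC_URL
secondary = "ipc:///tmp/erigon.ipc"  # stand-in for SECONDARY_RPC_URL

rpc_queue = deque([primary, secondary])

# Rotate to the secondary only when the primary fails and a secondary exists.
if get_params(rpc_queue[0]) is None and secondary is not None:
    rpc_queue.rotate(-1)

params = get_params(rpc_queue[0])
print("listening via", params["rpc"] if params else "nothing")
```

Note that, as far as the hunks show, the probe-and-rotate happens once before the `while not killer.kill_now` loop, so the listener does not fail over again if the chosen endpoint dies later.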
The remaining hunks belong to files whose headers were not captured in this view; the hunk headers identify the affected functions. Sandwich detection (`_get_all_start_end_swaps`) no longer requires the start and end swaps to be on different pool contracts:

````diff
@@ -163,8 +163,6 @@ def _get_all_start_end_swaps(swaps: List[Swap]) -> List[Tuple[Swap, List[Swap]]]
             if (
                 potential_start_swap.token_in_address
                 == potential_end_swap.token_out_address
-                and potential_start_swap.contract_address
-                != potential_end_swap.contract_address
                 and potential_start_swap.from_address == potential_end_swap.to_address
                 and not potential_start_swap.from_address in pool_addrs
             ):
````
Block construction in `mev_inspect.block` drops the post-merge miner lookup and relies on reward traces alone:

````diff
@@ -34,7 +34,8 @@ async def create_from_block_number(
         _find_or_fetch_block_traces(w3, block_number, trace_db_session),
         _find_or_fetch_base_fee_per_gas(w3, block_number, trace_db_session),
     )
-    miner_address = await _find_or_fetch_miner_address(w3, block_number, traces)
+
+    miner_address = _get_miner_address_from_traces(traces)

     return Block(
         block_number=block_number,
@@ -179,27 +180,11 @@ def _find_base_fee_per_gas(
     return base_fee


-async def _find_or_fetch_miner_address(
-    w3,
-    block_number: int,
-    traces: List[Trace],
-) -> Optional[str]:
-    # eth1 blocks
-    miner_address = _get_miner_address_from_traces(traces)
-    if miner_address is not None:
-        return miner_address
-    return await _fetch_miner_eth2(w3, block_number)
-
-
-async def _fetch_miner_eth2(w3, block_number: int) -> Optional[str]:
-    block_json = await w3.eth.get_block(block_number)
-    return block_json["miner"]
-
-
 def _get_miner_address_from_traces(traces: List[Trace]) -> Optional[str]:
     for trace in traces:
         if trace.type == TraceType.reward:
             return trace.action["author"]

     return None
````
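The removed `_fetch_miner_eth2` fallback read the `miner` field from the block header for post-merge blocks, which carry no reward trace; only the reward-trace lookup survives. A minimal sketch of what that distinction means, using stand-in trace objects rather than the real `Trace`/`TraceType` classes:

```python
from dataclasses import dataclass, field
from typing import Dict, List, Optional


@dataclass
class FakeTrace:
    type: str                        # stand-in for TraceType
    action: Dict = field(default_factory=dict)


def miner_from_traces(traces: List[FakeTrace]) -> Optional[str]:
    # Same shape as _get_miner_address_from_traces above: pre-merge blocks
    # contain a "reward" trace that names the block author.
    for trace in traces:
        if trace.type == "reward":
            return trace.action["author"]
    return None


pre_merge = [FakeTrace("call"), FakeTrace("reward", {"author": "0xminer"})]
post_merge = [FakeTrace("call")]     # proof-of-stake blocks emit no reward trace

print(miner_from_traces(pre_merge))   # 0xminer
print(miner_from_traces(post_merge))  # None -- the removed fallback used to cover this
```

So with this change, post-merge blocks end up with `miner_address = None` unless it is set elsewhere; the removed test module further down exercised exactly that header-based fallback.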
Swap construction no longer rejects transfers where the in and out tokens are the same:

````diff
@@ -94,9 +94,6 @@ def create_swap_from_pool_transfers(
     transfer_in = transfers_to_pool[-1]
     transfer_out = transfers_from_pool_to_recipient[0]

-    if transfer_in.token_address == transfer_out.token_address:
-        return None
-
     return Swap(
         abi_name=trace.abi_name,
         transaction_hash=trace.transaction_hash,
````
Liquidation detection no longer skips reverted traces:

````diff
@@ -30,9 +30,6 @@ def get_liquidations(classified_traces: List[ClassifiedTrace]) -> List[Liquidati
         if _is_child_liquidation(trace, parent_liquidations):
             continue

-        if trace.error == "Reverted":
-            continue
-
         if trace.classification == Classification.liquidate:

             parent_liquidations.append(trace)
````
CryptoPunk bid acceptances no longer filter out traces with errors:

````diff
@@ -74,10 +74,7 @@ def _get_punk_bid_acceptances_for_transaction(
         if not isinstance(trace, DecodedCallTrace):
             continue

-        elif (
-            trace.classification == Classification.punk_accept_bid
-            and trace.error is None
-        ):
+        elif trace.classification == Classification.punk_accept_bid:
             punk_accept_bid = PunkBidAcceptance(
                 block_number=trace.block_number,
                 transaction_hash=trace.transaction_hash,
````
In the trace and receipt schemas, the `cls` argument is dropped from the hex-to-int validators:

````diff
@@ -13,7 +13,7 @@ class CallResult(CamelModel):
     gas_used: int

     @validator("gas_used", pre=True)
-    def maybe_hex_to_int(cls, v):
+    def maybe_hex_to_int(v):
         if isinstance(v, str):
             return hex_to_int(v)
         return v
@@ -27,7 +27,7 @@ class CallAction(Web3Model):
     gas: int

     @validator("value", "gas", pre=True)
-    def maybe_hex_to_int(cls, v):
+    def maybe_hex_to_int(v):
         if isinstance(v, str):
             return hex_to_int(v)
         return v
@@ -24,7 +24,7 @@ class Receipt(CamelModel):
         "cumulative_gas_used",
         pre=True,
     )
-    def maybe_hex_to_int(cls, v):
+    def maybe_hex_to_int(v):
         if isinstance(v, str):
             return hex_to_int(v)
         return v
````
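These hunks drop the conventional `cls` first argument from the `@validator` methods. Pydantic v1 (the version pinned in `pyproject.toml` below, `pydantic = "^1.8.2"`) inspects the validator signature and accepts either spelling, so the behaviour is unchanged: hex-encoded RPC values are converted to ints before field validation. A minimal, self-contained sketch, using a plain `BaseModel` instead of the repo's `CamelModel`:

```python
from pydantic import BaseModel, validator


def hex_to_int(value: str) -> int:
    # Same job as the helper used in the hunks: "0x5208" -> 21000.
    return int(value, 16)


class CallResult(BaseModel):
    gas_used: int

    @validator("gas_used", pre=True)
    def maybe_hex_to_int(cls, v):
        # pre=True runs before the int coercion, so hex strings are caught here.
        if isinstance(v, str):
            return hex_to_int(v)
        return v


print(CallResult(gas_used="0x5208").gas_used)  # 21000
print(CallResult(gas_used=21000).gas_used)     # 21000
```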
**`poetry.lock`** (generated) — 2654 changed lines; diff suppressed because it is too large.
**`pyproject.toml`**

````diff
@@ -10,13 +10,12 @@ web3 = "^5.23.0"
 pydantic = "^1.8.2"
 hexbytes = "^0.2.1"
 click = "^8.0.1"
-psycopg2-binary = "^2.9.7"
+psycopg2 = "^2.9.1"
 aiohttp = "^3.8.0"
 dramatiq = {extras = ["redis"], version = "^1.12.1"}
 pycoingecko = "^2.2.0"
 boto3 = "^1.20.48"
 aiohttp-retry = "^2.4.6"
-pyyaml = "^6.0.1"

 [tool.poetry.dev-dependencies]
 pre-commit = "^2.13.0"
@@ -31,7 +30,6 @@ alembic = "^1.6.5"
 CProfileV = "^1.0.7"
 regex = "^2021.10.8"
 pytest-profiling = "^1.7.0"
-sqlalchemy = "^1.4.23"

 [build-system]
 requires = ["poetry-core>=1.0.0"]
@@ -84,6 +82,3 @@ filter_files = true
 known_first_party = "mev_inspect"
 known_third_party = "alembic"
 py_version=39
-
-[pytest]
-asyncio_mode = "auto"
````

Two further file diffs are suppressed because one or more lines are too long.
A test module for the miner-address lookup is removed entirely (its file header was not captured):

````diff
@@ -1,65 +0,0 @@
-from unittest.mock import MagicMock, patch
-
-import pytest
-
-from mev_inspect.block import _find_or_fetch_miner_address
-from tests.utils import load_test_block
-
-
-@pytest.fixture
-def mocked_web3():
-    with patch("mev_inspect.block.Web3") as mock_web3:
-        yield mock_web3
-
-
-@pytest.mark.asyncio
-# pylint: disable=redefined-outer-name
-async def test_eth1_block_miner(mocked_web3):
-    # Create a mock Web3 instance
-    mock_web3_instance = mocked_web3.return_value
-
-    # Set up the mock for web3.eth.get_block
-    mock_eth = mock_web3_instance.eth
-    mock_eth.get_block.return_value = {
-        "miner": "0x4a536c1f6a5d5a9c1aeca9f6d04fbbf5f0d8f4e3"
-    }
-
-    # Load a sample block and remove the miner
-    block_number = 10921991
-    block = load_test_block(block_number)
-    block.miner = None
-
-    # Test that the miner is fetched
-    miner_address = await _find_or_fetch_miner_address(
-        w3=mock_web3_instance, traces=block.traces, block_number=block_number
-    )  # Use 'await'
-
-    # this is within the traces object
-    assert miner_address == "0x52bc44d5378309ee2abf1539bf71de1b7d7be3b5"
-
-
-@pytest.mark.asyncio
-# pylint: disable=redefined-outer-name
-async def test_eth2_block_miner(mocked_web3):
-    # Create a mock Web3 instance
-    mock_web3_instance = mocked_web3.return_value
-
-    # Create a coroutine function to mock w3.eth.get_block
-    # pylint: disable=unused-argument
-    async def mock_get_block(block_number):
-        return {"miner": "0x4a536c1f6a5d5a9c1aeca9f6d04fbbf5f0d8f4e3"}
-
-    # Mock w3.eth.get_block with the coroutine function
-    mock_web3_instance.eth.get_block = MagicMock(side_effect=mock_get_block)
-
-    # Load a sample block and remove the miner
-    block_number = 10921990
-    block = load_test_block(block_number)
-    block.miner = None
-
-    # Test that the miner is fetched
-    miner_address = await _find_or_fetch_miner_address(
-        w3=mock_web3_instance, traces=block.traces, block_number=block_number
-    )  # Use 'await'
-
-    assert miner_address == "0x4a536c1f6a5d5a9c1aeca9f6d04fbbf5f0d8f4e3"
````
A reverted-liquidation test is removed from the liquidations test module:

````diff
@@ -115,18 +115,3 @@ def test_c_token_liquidation(trace_classifier: TraceClassifier):

     for liquidation in liquidations:
         assert liquidation in result
-
-
-def test_reverted_liquidation(trace_classifier: TraceClassifier):
-    block_number = 15049646
-    transaction_hash = (
-        "0x6dd0d8be8a77651f64ef399b47fbc87011bd796b43349c3164ff7da965e0b345"
-    )
-
-    block = load_test_block(block_number)
-    classified_traces = trace_classifier.classify(block.traces)
-    result = get_liquidations(classified_traces)
-
-    assert transaction_hash not in [
-        liquidation.transaction_hash for liquidation in result
-    ]
````
**`tests/utils.py`**

````diff
@@ -2,6 +2,8 @@ import json
 import os
 from typing import Dict, List

+from pydantic import parse_file_as
+
 from mev_inspect.schemas.blocks import Block
 from mev_inspect.schemas.sandwiches import Sandwich

@@ -12,10 +14,7 @@ TEST_SANDWICHES_DIRECTORY = os.path.join(THIS_FILE_DIRECTORY, "sandwiches")

 def load_test_sandwiches(block_number: int) -> List[Sandwich]:
     sandwiches_path = f"{TEST_SANDWICHES_DIRECTORY}/{block_number}.json"
-    with open(sandwiches_path, "r") as file:
-        sandwiches_data = json.load(file)
-        return [Sandwich(**sandwich) for sandwich in sandwiches_data]
+    return parse_file_as(List[Sandwich], sandwiches_path)


 def load_test_block(block_number: int) -> Block:
````
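The removed helper parsed the JSON by hand and validated each entry with `Sandwich(**sandwich)`; the replacement hands both steps to pydantic's `parse_file_as`. A small sketch of the difference, with a made-up `Item` model and temp path standing in for the real `Sandwich` schema and fixture files (pydantic v1 API assumed):

```python
import json
from typing import List

from pydantic import BaseModel, parse_file_as


class Item(BaseModel):              # toy stand-in for the Sandwich schema
    block_number: int
    profit_wei: int


path = "/tmp/items.json"            # hypothetical fixture file
with open(path, "w") as f:
    json.dump([{"block_number": 1, "profit_wei": "42"}], f)

# Manual route (the removed version of load_test_sandwiches):
with open(path, "r") as f:
    manual = [Item(**entry) for entry in json.load(f)]

# parse_file_as route (the new version): read, parse and validate
# against List[Item] in one call.
auto = parse_file_as(List[Item], path)

assert manual == auto
print(auto)
```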