Compare commits

1 commit

Author | SHA1 | Date
---|---|---
 | 032bd0a339 | 

@@ -1 +0,0 @@
cache

10 .env Normal file

@@ -0,0 +1,10 @@
# Postgres
POSTGRES_SERVER=db
POSTGRES_USER=postgres
POSTGRES_PASSWORD=password
POSTGRES_DB=mev_inspect

# PgAdmin
PGADMIN_LISTEN_PORT=5050
PGADMIN_DEFAULT_EMAIL=admin@example.com
PGADMIN_DEFAULT_PASSWORD=password
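
These variables line up with the connection URI commented out in alembic.ini further down this compare (`postgresql://postgres:password@db/mev_inspect`). A minimal sketch of how an application might assemble that URI from the environment — the helper name is illustrative, not code from this repo:

```python
import os


def build_database_uri() -> str:
    """Assemble a Postgres URI from the .env variables above."""
    user = os.environ["POSTGRES_USER"]
    password = os.environ["POSTGRES_PASSWORD"]
    host = os.environ["POSTGRES_SERVER"]
    db = os.environ["POSTGRES_DB"]
    return f"postgresql://{user}:{password}@{host}/{db}"
```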

6 .github/workflows/github-actions.yml vendored

@@ -21,7 +21,7 @@ jobs:
      - name: Bootstrap poetry
        shell: bash
        run: |
          curl -sSL https://install.python-poetry.org \
          curl -sL https://raw.githubusercontent.com/python-poetry/poetry/master/install-poetry.py \
            | python - -y

      - name: Update PATH
@@ -51,8 +51,8 @@ jobs:

      - name: Run precommit
        run: |
          poetry run pre-commit run --all-files
          poetry run pre-commit

      - name: Test with pytest
        shell: bash
        run: poetry run pytest --cov=mev_inspect tests
        run: poetry run test

15 .gitignore vendored

@@ -13,18 +13,3 @@ __pycache__
# coverage
htmlcov
.coverage*

# don't commit cache
cache

# k8s
.helm

# env
.envrc

# pycharm
.idea

.env
.python-version

.pre-commit-config.yaml

@@ -1,28 +1,20 @@
repos:
  - repo: https://github.com/ambv/black
    rev: 22.3.0
    rev: 20.8b1
    hooks:
      - id: black
        language_version: python3.9
  - repo: local
    hooks:
      - id: isort
        name: isort
        entry: poetry run isort .
        language: system
        types: [python]
      - id: black
        language_version: python3.9
  - repo: local
    hooks:
      - id: pylint
        name: pylint
        entry: poetry run pylint
        entry: python -m pylint.__main__
        args: ['--rcfile=.pylintrc', --disable=redefined-builtin]
        language: system
        types: [python]
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: v0.942
    rev: v0.910
    hooks:
      - id: 'mypy'
        additional_dependencies:
          - 'pydantic'
          - 'types-requests'

.pylintrc

@@ -433,7 +433,7 @@ int-import-graph=
known-standard-library=

# Force import order to recognize a module as part of a third party library.
known-third-party=alembic
known-third-party=enchant

# Couples of modules and preferred modules, separated by a comma.
preferred-modules=

CONTRIBUTING.md

@@ -1,36 +0,0 @@
# Contributing guide

Welcome to the Flashbots collective! We just ask you to be nice when you play with us.

## Pre-commit

We use pre-commit to maintain a consistent style, prevent errors, and ensure test coverage.

To set up, install dependencies through `poetry`:

```
poetry install
```

Then install pre-commit hooks with:

```
poetry run pre-commit install
```

## Tests

Run tests with:

```
./mev test
```

## Send a pull request

- Your proposed changes should first be described and discussed in an issue.
- Open the branch in a personal fork, not in the team repository.
- Every pull request should be small and represent a single change. If the problem is complicated, split it into multiple issues and pull requests.
- Every pull request should be covered by unit tests.

We appreciate you, friend <3.

28 Dockerfile

@@ -1,29 +1,19 @@
FROM python:3.9-slim-buster
FROM python:3.9

ENV POETRY_VERSION=1.1.12

RUN useradd --create-home flashbot \
RUN pip install -U pip \
    && apt-get update \
    && apt-get install -y --no-install-recommends build-essential libffi-dev libpq-dev gcc procps \
    && pip install poetry==$POETRY_VERSION \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
    && curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -

ENV PATH="${PATH}:/home/flashbot/.local/bin"
ENV PATH="${PATH}:/root/.poetry/bin"

COPY --chown=flashbot ./pyproject.toml /app/pyproject.toml
COPY --chown=flashbot ./poetry.lock /app/poetry.lock
COPY . /app
WORKDIR /app/

USER flashbot

RUN poetry config virtualenvs.create false \
    && poetry install

COPY --chown=flashbot . /app
# poetry uses virtual env by default, turn this off inside container
RUN poetry config virtualenvs.create false && \
    poetry install

# easter eggs 😝
RUN echo "PS1='🕵️:\[\033[1;36m\]\h \[\033[1;34m\]\W\[\033[0;35m\]\[\033[1;36m\]$ \[\033[0m\]'" >> ~/.bashrc

ENTRYPOINT [ "poetry" ]
CMD [ "run", "python", "loop.py" ]
CMD /bin/bash

110 GUIDE.md Normal file

@@ -0,0 +1,110 @@
# Contributor guide

### Requirements

* [Install](https://docs.docker.com/compose/install/) docker compose
  * To run `mev-inspect`, `postgres`, and `pgadmin` within a local container.
* Python
  * Our pre-commit hook requires v3.9; use pyenv to manage versions and venv, instructions [here](https://www.andreagrandi.it/2020/10/10/install-python-with-pyenv-create-virtual-environment-with-specific-python-version/).
  * Verify with `pre-commit install && pre-commit run --all-files`
* Archive node with `trace_*` rpc module (Erigon/OpenEthereum)
  * If you do not have access to an archive node, reach out to us on our [discord](https://discord.gg/5NB53YEGVM) for raw traces (of the blocks with MEV you're writing inspectors for) or an rpc endpoint.

### Quick start

We use poetry for python package management; start by installing the required libraries:
* `poetry install`

Build the container:
* `poetry run build`

Run a specific inspector:
* `poetry run inspect -script ./examples/uniswap_inspect.py -block_number 12901446 -rpc 'http://localhost:8545'`

Or directly using docker:
* `docker compose exec mev-inspect python testing_file.py -block_number 12901446 -rpc 'http://localhost:8545'`

You will be able to run all the inspectors against a specific transaction, block, or range of blocks once we finalize our data model/architecture, but for now, write a protocol-specific inspector script and verify it against a test block (with the MEV you're trying to quantify).

The full list of poetry commands for this repo can be found [here](https://github.com/flashbots/mev-inspect-py#poetry-scripts).


### Tracing

While simple ETH and token transfers are trivial to parse/filter (by processing their transaction input data, events and/or receipts), contract interactions can be complex to identify. EVM tracing allows us to dig deeper into the transaction execution cycle to look through the internal calls and any other additional proxy contracts the tx interacts with.

Trace types (by `action_type`), with a small classification sketch after this list:

* `Call`, returned when a method on a contract (the same as the tx `to` field, or a different one within) is executed. We can identify the input parameters in each instance by looking at this sub trace.
* `Self-destruct`, when a contract destroys the code at its address and transfers the ETH held in the contract to an EOA. A common pattern among arbitrage bots, given the gas refund savings.
* `Create`, when a contract deploys another contract and transfers assets to it.
* `Reward`, pertaining to the block reward and uncle reward; not relevant here.

Note that this is for the Erigon/OpenEthereum `trace` module; Geth has a different tracing mechanism that is more low-level and irrelevant for inspect.
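
A minimal sketch of how raw traces might be bucketed by action type before handing them to inspectors — the dict shape and the `action_type` key are assumptions for illustration, not this repo's actual trace model:

```python
from collections import defaultdict
from typing import Dict, List


# Assumed shape: each decoded trace is a dict carrying an "action_type" key
# with one of the values listed above ("call", "self-destruct", "create", "reward").
def bucket_traces(traces: List[dict]) -> Dict[str, List[dict]]:
    """Group traces by action type so each inspector only sees the kinds it handles."""
    buckets: Dict[str, List[dict]] = defaultdict(list)
    for trace in traces:
        buckets[trace.get("action_type", "unknown")].append(trace)
    return buckets
```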

### Architecture

TODO: Actions, inspectors, reducers context

TODO: Single tx vs multi tx context

#### Inspectors

TODO: list done/wip/current

#### Tokenflow

The method iterates over all the traces and makes a note of all the ETH inflows/outflows, as well as stablecoins (USDT/USDC/DAI), for the `eoa`, `contract`, and `proxy`. Once it is done, it finds the net profit by subtracting the gas spent from the MEV revenue. All profits are converted to ETH, based on the exchange rate at that block height. A condensed sketch follows the example output below.

Example: https://etherscan.io/tx/0x4121ce805d33e952b2e6103a5024f70c118432fd0370128d6d7845f9b2987922

ETH=>ENG=>ETH across DEXs

Script output:
```
EOA: 0x00000098163d8908dfbd126c873c9c4732a2c2e6
Contract: 0x000000000000006f6502b7f2bbac8c30a3f67e9a
Tx proxy: 0x0000000000000000000000000000000000000000
Stablecoins inflow/outflow: [0, 0]
Net ETH profit, Wei 22357881284770142
```
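
A condensed version of that flow, assuming each trace is a dict with `value`, `from_address`, and `to_address` fields (illustrative names, not the repo's actual model):

```python
from typing import Iterable, Set


def net_eth_profit_wei(
    traces: Iterable[dict],
    tracked_addresses: Set[str],  # the eoa, contract, and proxy above
    gas_cost_wei: int,
) -> int:
    """Tally ETH inflows/outflows for the tracked addresses, then net out gas."""
    inflow = 0
    outflow = 0
    for trace in traces:
        value = int(trace.get("value", 0))
        if value == 0:
            continue
        if trace.get("to_address") in tracked_addresses:
            inflow += value
        if trace.get("from_address") in tracked_addresses:
            outflow += value
    return inflow - outflow - gas_cost_wei
```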

#### Database

Final `mev_inspections` table schema (a migration-style sketch of these fields follows the lists):

* As of `mev-inspect-rs`:
  * hash
  * status
    * `Success` or `Reverted`
  * block_number
  * gas_price
  * revenue
    * Revenue the searcher makes after accounting for gas used.
  * protocols
    * Different protocols that we identify the transaction to touch
  * actions
    * Different relevant actions parsed from the transaction traces
  * eoa
    * EOA address that initiates the transaction
  * contract
    * `to` field, either a custom contract utilized by a searcher to capture MEV or a simple router
  * proxy_impl
    * Proxy implementations used by searchers, if any
  * inserted_at

Additional fields we're interested in:
* miner
  * Coinbase address of the block miner
* eth_usd_price
  * Price of ETH at that block height
  * Similarly, for any tokens (say in an arbitrage inspection), we query against the relevant uniswap pools.
* tail_gas_price
  * Gas price of the transaction displaced from the block (the last tx that would otherwise have been included)
* token_flow_estimate
  * Profit output by the token flow function
* delta
  * Difference between the profit estimated by our inspectors and pure token flow analysis
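
A sketch of those fields as an Alembic migration, in the same style as the migrations further down this compare — the combined table layout and column types are assumptions drawn from the lists above, not a migration that exists in the repo:

```python
import sqlalchemy as sa
from alembic import op


def upgrade():
    op.create_table(
        "mev_inspections",
        sa.Column("hash", sa.String(66), primary_key=True),
        sa.Column("status", sa.String(256), nullable=False),  # Success / Reverted
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("gas_price", sa.Numeric, nullable=False),
        sa.Column("revenue", sa.Numeric, nullable=True),
        sa.Column("protocols", sa.JSON, nullable=True),
        sa.Column("actions", sa.JSON, nullable=True),
        sa.Column("eoa", sa.String(256), nullable=False),
        sa.Column("contract", sa.String(256), nullable=False),
        sa.Column("proxy_impl", sa.String(256), nullable=True),
        sa.Column("inserted_at", sa.TIMESTAMP, server_default=sa.func.now()),
        # additional fields discussed above
        sa.Column("miner", sa.String(256), nullable=True),
        sa.Column("eth_usd_price", sa.Numeric, nullable=True),
        sa.Column("tail_gas_price", sa.Numeric, nullable=True),
        sa.Column("token_flow_estimate", sa.Numeric, nullable=True),
        sa.Column("delta", sa.Numeric, nullable=True),
    )


def downgrade():
    op.drop_table("mev_inspections")
```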

[Creating an inspector from scratch](./CreateInspector.md)

21 LICENSE

@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2023 Flashbots

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

126 MONOLITHIC.md

@@ -1,126 +0,0 @@

# Running mev-inspect-py without kubernetes ('monolithic mode')

Running mev-inspect-py outside of kubernetes can be useful for debug purposes. In this case, the steps for installation are:
1. Install dependencies (pyenv, poetry, postgres)
1. Set up a python virtual environment using a matching python version (3.9.x) and install the required python modules using poetry
1. Create a postgres database
1. Run database migrations

The database credentials and archive node address used by mev-inspect-py need to be loaded into environment variables (both for database migrations and to run mev-inspect-py).

## Ubuntu install instructions

Starting from a clean Ubuntu 22.04 installation, the prerequisites for pyenv and psycopg2 (python3-dev libpq-dev) can be installed with

`sudo apt install -y make build-essential git libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev liblzma-dev python3-dev libpq-dev`

### pyenv
Install pyenv using the web installer

`curl https://pyenv.run | bash`

and add the following to `~/.bashrc` (if running locally) or `~/.profile` (if running over ssh):

```
export PYENV_ROOT="$HOME/.pyenv"
command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"
eval "$(pyenv init -)"
```

Then update the current shell by running `source ~/.bashrc` or `source ~/.profile` as appropriate.

### Poetry

Install Poetry using the web installer

`curl -sSL https://install.python-poetry.org | python3 -`

and add the following to `~/.bashrc` (if running locally) or `~/.profile` (if running over ssh):

`export PATH="/home/user/.local/bin:$PATH"`

If running over ssh, you should also add the following to `~/.profile` to prevent [Poetry errors](https://github.com/python-poetry/poetry/issues/1917) from a lack of an active keyring:

`export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring`

Again, update the current shell by running `source ~/.bashrc` or `source ~/.profile` as appropriate.

### postgres
We have tested two alternatives for postgres: installing locally, or as a container.

#### Option 1: Installing locally

To install locally from a clean Ubuntu 22.04 installation, run:
`sudo apt install postgresql postgresql-contrib`

Note: You may need to reconfigure your pg_hba.conf to allow local access.

#### Option 2: Installing docker

To avoid interfering with your local postgres instance, you may prefer to run postgres within a docker container.
For docker installation instructions, please refer to https://docs.docker.com/engine/install/ubuntu/

### mev-inspect-py

With all dependencies now installed, clone the mev-inspect-py repo:
```
git clone https://github.com/flashbots/mev-inspect-py.git
cd mev-inspect-py
```
We now install the required python version and use Poetry to install the required python modules into a virtual environment.

```
pyenv install 3.9.16
pyenv local 3.9.16
poetry env use 3.9.16
poetry install
```

### Create database
mev-inspect-py outputs to a postgres database, so we need to set this up. There are various ways of doing this; two options are presented here.

#### Option 1 — Run postgres locally
```
sudo -u postgres psql
\password
postgres
create database mev_inspect;
\q
```

#### Option 2 — Use postgres docker image
To avoid interfering with your local postgres instance, you may prefer to run postgres within a docker container. First ensure that postgres is not currently running, so that port `5432` is available:
`sudo systemctl stop postgresql`
and then start a containerised postgres instance:
`sudo docker run -d -p 5432:5432 -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=mev_inspect postgres`

### Environment variables
We will need to set a few environment variables to use mev-inspect-py. **These will be required every time mev-inspect-py runs**, so again you may wish to add these to your `~/.bashrc` and/or `~/.profile` as appropriate. Note that you need to substitute the correct URL for your archive node below if you are not running Erigon locally.
```
export POSTGRES_USER=postgres
export POSTGRES_PASSWORD=postgres
export POSTGRES_HOST=localhost
export RPC_URL="http://127.0.0.1:8545"
```
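
Before running the migrations below, a quick connectivity check along these lines can confirm the variables are set and the database is reachable — an illustrative snippet, assuming `psycopg2` is available (its build prerequisites were part of the apt step above):

```python
import os

import psycopg2

# Connect using the same environment variables exported above.
conn = psycopg2.connect(
    host=os.environ["POSTGRES_HOST"],
    user=os.environ["POSTGRES_USER"],
    password=os.environ["POSTGRES_PASSWORD"],
    dbname="mev_inspect",
)
with conn, conn.cursor() as cur:
    cur.execute("SELECT version();")
    print(cur.fetchone()[0])  # prints the server version if everything is wired up
conn.close()
```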

### Database migrations
Finally run the database migrations and fetch price information:

```
poetry run alembic upgrade head
poetry run fetch-all-prices
```

## Usage instructions
The same functionality available through kubernetes can be run in 'monolithic mode', but the relevant functions now need to be invoked by Poetry directly. So to inspect a single block, run for example:

`poetry run inspect-block 16379706`

Or to inspect a range of blocks:

`poetry run inspect-many-blocks 16379606 16379706`

Or to run the test suite:

`poetry run pytest tests`

373 README.md

@@ -1,306 +1,109 @@
⚠️ This tool has been deprecated. You can visit [Flashbots Data](https://datasets.flashbots.net/) for historical mev-inspect data on Ethereum and join us on the [Flashbots forum](https://collective.flashbots.net). ⚠️
# mev-inspect
A [WIP] Ethereum MEV Inspector in Python managed by Poetry

# mev-inspect-py
## Containers
mev-inspect's local setup is built on [Docker Compose](https://docs.docker.com/compose/)

[](https://github.com/RichardLitt/standard-readme)
[](https://discord.gg/7hvTycdNcK)
By default it starts up:
- `mev-inspect` - a container with the code in this repo used for running scripts
- `db` - a postgres database instance
- `pgadmin` - a postgres DB UI for querying and more (available at localhost:5050)

[Maximal extractable value](https://ethereum.org/en/developers/docs/mev/) inspector for Ethereum, to illuminate the [dark forest](https://www.paradigm.xyz/2020/08/ethereum-is-a-dark-forest/) 🌲💡

Given a block, mev-inspect finds:
- miner payments (gas + coinbase)
- token transfers and profit
- swaps and [arbitrages](https://twitter.com/bertcmiller/status/1427632028263059462)
- ...and more

Data is stored in Postgres for analysis.

## Install

mev-inspect-py is built to run on kubernetes locally and in production.

### Dependencies

- [docker](https://www.docker.com/products/docker-desktop)
- [kind](https://kind.sigs.k8s.io/docs/user/quick-start), or a similar tool for running local Kubernetes clusters
- [kubectl](https://kubernetes.io/docs/tasks/tools/)
- [helm](https://helm.sh/docs/intro/install/)
- [tilt](https://docs.tilt.dev/install.html)

### Set up

Create a new cluster with:
## Running locally
Setup [Docker](https://www.docker.com/products/docker-desktop)

Start the services (optionally as background processes)
```
kind create cluster
poetry run start [-b]
```

Set an environment variable `RPC_URL` to an RPC for fetching blocks.

mev-inspect-py currently requires a node with support for Erigon traces and receipts (not geth yet 😔).

[pokt.network](https://www.pokt.network/)'s "Ethereum Mainnet Archival with trace calls" is a good hosted option.

Example:

To stop the services (if running in the background, otherwise just ctrl+c)
```
export RPC_URL="http://111.111.111.111:8546"
poetry run stop
```

The MEV container can be attached via
```
poetry run attach
```

Running additional compose commands is possible through standard `docker
compose ...` calls. Check `docker compose help` for more tools available

## Executing scripts
Inspection is the only simplified api available through poetry at the moment,
with a more generalized api on the horizon.

Inspect scripts must have `-script`, `-block_number` and `-rpc` arguments.
Using the uniswap inspect from `./examples`:
```
poetry run inspect -script ./examples/uniswap_inspect.py -block_number 11931271 \
    -rpc 'http://111.11.11.111:8545'
```

Generalized user-defined scripts can still be run through the docker interface as
```
docker compose exec mev-inspect python testing_file.py \
    -block_number 11931271 \
    -rpc 'http://111.11.11.111:8545'
```
### Poetry Scripts
```bash
# code check
poetry run lint # linting via Pylint
poetry run test # testing and code coverage with Pytest
poetry run isort # fixing imports
poetry run mypy # type checking
poetry run black # style guide
poetry run pre-commit # runs Black, PyLint and MyPy
# docker management
poetry run start [-b] # starts all services, optionally in the background
poetry run stop # shuts down all services, or just ctrl + c if foreground
poetry run build # rebuilds containers
poetry run attach # enters the mev-inspect container in interactive mode
# launches inspection script
poetry run inspect -script ... -block_number ... -rpc ...
```


Next, start all services with:

## Rebuilding containers
After changes to the app's Dockerfile, rebuild with
```
tilt up
poetry run build
```

Press "space" to see a browser of the services starting up.
## Using PGAdmin

On first startup, you'll need to apply database migrations with:
1. Go to [localhost:5050](localhost:5050)

```
./mev exec alembic upgrade head
```
2. Login with the PGAdmin username and password in `.env`

And load prices data

```
./mev prices fetch-all
```

## Monolithic (non-kubernetes) install instructions

For an alternative means of running mev-inspect-py for smaller set-ups or debug purposes, see the [monolithic install instructions](MONOLITHIC.md).

## Usage

### Inspect a single block

Inspecting block [12914944](https://twitter.com/mevalphaleak/status/1420416437575901185):

```
./mev inspect 12914944
```

### Inspect many blocks

Inspecting blocks 12914944 to 12914954:

```
./mev inspect-many 12914944 12914954
```

### Inspect all incoming blocks

Start a block listener with:

```
./mev listener start
```

By default, it will pick up wherever you left off.
If running for the first time, the listener starts at the latest block.

Tail logs for the listener with:

```
./mev listener tail
```

And stop the listener with:

```
./mev listener stop
```

### Backfilling

For larger backfills, you can inspect many blocks in parallel.

To inspect blocks 12914944 to 12915044, run
```
./mev backfill 12914944 12915044
```

This queues the blocks in Redis to be pulled off by the mev-inspect-worker service.

To increase or decrease parallelism, update the replicaCount value for the mev-inspect-workers helm chart.

Locally, this can be done by editing Tiltfile and changing "replicaCount=1" to your desired parallelism:
```
k8s_yaml(helm(
    './k8s/mev-inspect-workers',
    name='mev-inspect-workers',
    set=["replicaCount=1"],
))
```

You can see worker pods spin up then complete by watching the status of all pods
```
watch kubectl get pods
```

To see progress and failed batches, connect to Redis with
```
./mev redis
```

For total messages, query:
```
HLEN dramatiq:default.msgs
```

For messages failed and waiting to retry in the delay queue (DQ), query:
```
HGETALL dramatiq:default.DQ.msgs
```

For messages permanently failed in the dead letter queue (XQ), query:
```
HGETALL dramatiq:default.XQ.msgs
```

To clear the queue, delete keys for the main queue and delay queue
```
DEL dramatiq:default.msgs
DEL dramatiq:default.DQ.msgs
```

For more information on queues, see the [spec shared by dramatiq](https://github.com/Bogdanp/dramatiq/blob/24cbc0dc551797783f41b08ea461e1b5d23a4058/dramatiq/brokers/redis/dispatch.lua#L24-L43)

**Backfilling a list of blocks**

Create a file containing a block per row, for example blocks.txt containing:
```
12500000
12500001
12500002
```

Then queue the blocks with
```
cat blocks.txt | ./mev block-list
```

To watch the logs for a given worker pod, take its pod name using the above, then run:
```
kubectl logs -f pod/mev-inspect-worker-abcdefg
```

(where `mev-inspect-worker-abcdefg` is your actual pod name)


### Exploring

All inspect output data is stored in Postgres.

To connect to the local Postgres database for querying, launch a client container with:

```
./mev db
```

When you see the prompt:

```
mev_inspect=#
```

You're ready to query!

Try finding the total number of swaps decoded with UniswapV3Pool:

```
SELECT COUNT(*) FROM swaps WHERE abi_name='UniswapV3Pool';
```

or the top 10 arbs by gross profit that took profit in WETH:

```
SELECT *
FROM arbitrages
WHERE profit_token_address = '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2'
ORDER BY profit_amount DESC
LIMIT 10;
```

Postgres tip: Enter `\x` to enter "Expanded display" mode, which looks nicer for results with many columns.

## FAQ

### How do I delete / reset my local postgres data?

Stop the system if running:

```
tilt down
```

Delete it with:

```
kubectl delete pvc data-postgresql-postgresql-0
```

Start back up again:

```
tilt up
```

And rerun migrations to create the tables again:

```
./mev exec alembic upgrade head
```

### I was using the docker-compose setup and want to switch to kube, now what?

Re-add the old `docker-compose.yml` file to your mev-inspect-py directory.

A copy can be found [here](https://github.com/flashbots/mev-inspect-py/blob/ef60c097719629a7d2dc56c6e6c9a100fb706f76/docker-compose.yml)

Tear down docker-compose resources:

```
docker compose down
```

Then go through the steps in the current README for kube setup.

### Error from server (AlreadyExists): pods "postgres-client" already exists

This means the postgres client container didn't shut down correctly.

Delete this one with:

```
kubectl delete pod/postgres-client
```

Then start it back up again.

## Maintainers

- [@lukevs](https://github.com/lukevs)
- [@gheise](https://github.com/gheise)
- [@bertmiller](https://github.com/bertmiller)
3. Add a new engine for mev_inspect with
- host: db
- user / password: see `.env`

## Contributing
Development can be done locally or in the docker container. Use local if
contributions can be fully tested without invoking the database-related
services.

[Flashbots](https://flashbots.net) is a research and development collective working on mitigating the negative externalities of decentralized economies. We contribute with the larger free software community to illuminate the dark forest.
1. Install dependencies and build the python environment
```
poetry install
```
or with docker
```
poetry run build
```
2. Pre-commit is used to maintain a consistent style, prevent errors and ensure
test coverage. Make sure to fix any errors presented via the Black, Pylint and
MyPy pre-commit hooks
```
poetry run pre-commit
```

You are welcome here <3.

- If you want to join us, come and say hi in our [Discord chat](https://discord.gg/7hvTycdNcK).
- If you have a question, feedback or a bug report for this project, please [open a new Issue](https://github.com/flashbots/mev-inspect-py/issues).
- If you would like to contribute with code, check the [CONTRIBUTING file](CONTRIBUTING.md).
- We just ask you to be nice.

## Security

If you find a security vulnerability in this project or any other initiative related to Flashbots, please let us know by sending an email to security@flashbots.net.

---

Made with ☀️ by the ⚡🤖 collective.
or within docker
```
pre-commit run --all-files
```
3. Update README if needed

119 Tiltfile

@@ -1,119 +0,0 @@
load("ext://helm_remote", "helm_remote")
load("ext://secret", "secret_from_dict")
load("ext://configmap", "configmap_from_dict")

helm_remote("postgresql",
    repo_name="bitnami",
    repo_url="https://charts.bitnami.com/bitnami",
    set=["auth.postgresPassword=password", "auth.database=mev_inspect"],
)

helm_remote("redis",
    repo_name="bitnami",
    repo_url="https://charts.bitnami.com/bitnami",
    set=["global.redis.password=password"],
)

k8s_yaml(configmap_from_dict("mev-inspect-rpc", inputs = {
    "url" : os.environ["RPC_URL"],
}))

k8s_yaml(configmap_from_dict("mev-inspect-listener-healthcheck", inputs = {
    "url" : os.getenv("LISTENER_HEALTHCHECK_URL", default=""),
}))

k8s_yaml(secret_from_dict("mev-inspect-db-credentials", inputs = {
    "username" : "postgres",
    "password": "password",
    "host": "postgresql",
}))

# if using https://github.com/taarushv/trace-db
# k8s_yaml(secret_from_dict("trace-db-credentials", inputs = {
#     "username" : "username",
#     "password": "password",
#     "host": "trace-db-postgresql",
# }))

docker_build("mev-inspect-py", ".",
    live_update=[
        sync(".", "/app"),
        run("cd /app && poetry install",
            trigger="./pyproject.toml"),
    ],
)

k8s_yaml(helm(
    './k8s/mev-inspect',
    name='mev-inspect',
    set=[
        "extraEnv[0].name=AWS_ACCESS_KEY_ID",
        "extraEnv[0].value=foobar",
        "extraEnv[1].name=AWS_SECRET_ACCESS_KEY",
        "extraEnv[1].value=foobar",
        "extraEnv[2].name=AWS_REGION",
        "extraEnv[2].value=us-east-1",
        "extraEnv[3].name=AWS_ENDPOINT_URL",
        "extraEnv[3].value=http://localstack:4566",
    ],
))

k8s_yaml(helm(
    './k8s/mev-inspect-workers',
    name='mev-inspect-workers',
    set=[
        "extraEnv[0].name=AWS_ACCESS_KEY_ID",
        "extraEnv[0].value=foobar",
        "extraEnv[1].name=AWS_SECRET_ACCESS_KEY",
        "extraEnv[1].value=foobar",
        "extraEnv[2].name=AWS_REGION",
        "extraEnv[2].value=us-east-1",
        "extraEnv[3].name=AWS_ENDPOINT_URL",
        "extraEnv[3].value=http://localstack:4566",
        "replicaCount=1",
    ],
))

k8s_resource(
    workload="mev-inspect",
    resource_deps=["postgresql", "redis-master"],
)

k8s_resource(
    workload="mev-inspect-workers",
    resource_deps=["postgresql", "redis-master"],
)

# uncomment to enable price monitor
# k8s_yaml(helm('./k8s/mev-inspect-prices', name='mev-inspect-prices'))
# k8s_resource(workload="mev-inspect-prices", resource_deps=["postgresql"])

local_resource(
    'pg-port-forward',
    serve_cmd='kubectl port-forward --namespace default svc/postgresql 5432:5432',
    resource_deps=["postgresql"]
)

# if using local S3 exports
#k8s_yaml(secret_from_dict("mev-inspect-export", inputs = {
#    "export-bucket-name" : "local-export",
#    "export-bucket-region": "us-east-1",
#    "export-aws-access-key-id": "foobar",
#    "export-aws-secret-access-key": "foobar",
#}))

#helm_remote(
#    "localstack",
#    repo_name="localstack-charts",
#    repo_url="https://localstack.github.io/helm-charts",
#)
#
#local_resource(
#    'localstack-port-forward',
#    serve_cmd='kubectl port-forward --namespace default svc/localstack 4566:4566',
#    resource_deps=["localstack"]
#)
#
#k8s_yaml(configmap_from_dict("mev-inspect-export", inputs = {
#    "services": "s3",
#}))

89 alembic.ini

@@ -1,89 +0,0 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# sqlalchemy.url = postgresql://postgres:password@db/mev_inspect


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/README

@@ -1 +0,0 @@
Generic single-database configuration.

alembic/env.py

@@ -1,76 +0,0 @@
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from mev_inspect.db import get_inspect_database_uri

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option("sqlalchemy.url", get_inspect_database_uri())

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

alembic/script.py.mako

@@ -1,24 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

@@ -1,54 +0,0 @@
"""Change miner payments and transfers primary keys to include block number

Revision ID: 04a3bb3740c3
Revises: a10d68643476
Create Date: 2021-11-02 22:42:01.702538

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "04a3bb3740c3"
down_revision = "a10d68643476"
branch_labels = None
depends_on = None


def upgrade():
    # transfers
    op.execute("ALTER TABLE transfers DROP CONSTRAINT transfers_pkey")
    op.create_primary_key(
        "transfers_pkey",
        "transfers",
        ["block_number", "transaction_hash", "trace_address"],
    )
    op.drop_index("ix_transfers_block_number")

    # miner_payments
    op.execute("ALTER TABLE miner_payments DROP CONSTRAINT miner_payments_pkey")
    op.create_primary_key(
        "miner_payments_pkey",
        "miner_payments",
        ["block_number", "transaction_hash"],
    )
    op.drop_index("ix_block_number")


def downgrade():
    # transfers
    op.execute("ALTER TABLE transfers DROP CONSTRAINT transfers_pkey")
    op.create_index("ix_transfers_block_number", "transfers", ["block_number"])
    op.create_primary_key(
        "transfers_pkey",
        "transfers",
        ["transaction_hash", "trace_address"],
    )

    # miner_payments
    op.execute("ALTER TABLE miner_payments DROP CONSTRAINT miner_payments_pkey")
    op.create_index("ix_block_number", "miner_payments", ["block_number"])
    op.create_primary_key(
        "miner_payments_pkey",
        "miner_payments",
        ["transaction_hash"],
    )

@@ -1,35 +0,0 @@
"""Change blocks.timestamp to timestamp

Revision ID: 04b76ab1d2af
Revises: 2c90b2b8a80b
Create Date: 2021-11-26 15:31:21.111693

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "04b76ab1d2af"
down_revision = "0cef835f7b36"
branch_labels = None
depends_on = None


def upgrade():
    op.alter_column(
        "blocks",
        "block_timestamp",
        type_=sa.TIMESTAMP,
        nullable=False,
        postgresql_using="TO_TIMESTAMP(block_timestamp)",
    )


def downgrade():
    op.alter_column(
        "blocks",
        "block_timestamp",
        type_=sa.Numeric,
        nullable=False,
        postgresql_using="extract(epoch FROM block_timestamp)",
    )

@@ -1,47 +0,0 @@
"""Create classifications table

Revision ID: 0660432b9840
Revises:
Create Date: 2021-07-23 20:08:42.016711

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "0660432b9840"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "classified_traces",
        sa.Column("classified_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column(
            "classification",
            sa.String(256),
            nullable=False,
        ),
        sa.Column("trace_type", sa.String(256), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("protocol", sa.String(256), nullable=True),
        sa.Column("abi_name", sa.String(1024), nullable=True),
        sa.Column("function_name", sa.String(2048), nullable=True),
        sa.Column("function_signature", sa.String(2048), nullable=True),
        sa.Column("inputs", sa.JSON, nullable=True),
        sa.Column("from_address", sa.String(256), nullable=True),
        sa.Column("to_address", sa.String(256), nullable=True),
        sa.Column("gas", sa.Numeric, nullable=True),
        sa.Column("value", sa.Numeric, nullable=True),
        sa.Column("gas_used", sa.Numeric, nullable=True),
        sa.Column("error", sa.String(256), nullable=True),
        sa.PrimaryKeyConstraint("transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("classified_traces")

@@ -1,34 +0,0 @@
"""empty message

Revision ID: 070819d86587
Revises: d498bdb0a641
Create Date: 2021-11-26 18:25:13.402822

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "d498bdb0a641"
down_revision = "b9fa1ecc9929"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "punk_snipes",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("from_address", sa.String(256), nullable=False),
        sa.Column("punk_index", sa.Numeric, nullable=False),
        sa.Column("min_acceptance_price", sa.Numeric, nullable=False),
        sa.Column("acceptance_price", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("block_number", "transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("punk_snipes")

@@ -1,35 +0,0 @@
"""Create miner_payments table

Revision ID: 083978d6e455
Revises: 92f28a2b4f52
Create Date: 2021-08-30 17:42:25.548130

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "083978d6e455"
down_revision = "92f28a2b4f52"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "miner_payments",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(66), primary_key=True),
        sa.Column("transaction_index", sa.Numeric, nullable=False),
        sa.Column("miner_address", sa.String(256), nullable=False),
        sa.Column("coinbase_transfer", sa.Numeric, nullable=False),
        sa.Column("base_fee_per_gas", sa.Numeric, nullable=False),
        sa.Column("gas_price", sa.Numeric, nullable=False),
        sa.Column("gas_price_with_coinbase_transfer", sa.Numeric, nullable=False),
        sa.Column("gas_used", sa.Numeric, nullable=False),
    )


def downgrade():
    op.drop_table("miner_payments")

@@ -1,26 +0,0 @@
"""Rename pool_address to contract_address

Revision ID: 0cef835f7b36
Revises: 5427d62a2cc0
Create Date: 2021-11-19 15:36:15.152622

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "0cef835f7b36"
down_revision = "5427d62a2cc0"
branch_labels = None
depends_on = None


def upgrade():
    op.alter_column(
        "swaps", "pool_address", nullable=False, new_column_name="contract_address"
    )


def downgrade():
    op.alter_column(
        "swaps", "contract_address", nullable=False, new_column_name="pool_address"
    )

@@ -1,28 +0,0 @@
"""Add nullable transaction_position field to swaps and traces

Revision ID: 15ba9c27ee8a
Revises: 04b76ab1d2af
Create Date: 2021-12-02 18:24:18.218880

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "15ba9c27ee8a"
down_revision = "ead7eb8283b9"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column(
        "classified_traces",
        sa.Column("transaction_position", sa.Numeric, nullable=True),
    )
    op.add_column("swaps", sa.Column("transaction_position", sa.Numeric, nullable=True))


def downgrade():
    op.drop_column("classified_traces", "transaction_position")
    op.drop_column("swaps", "transaction_position")

@@ -1,26 +0,0 @@
"""Add received_collateral_address to liquidations

Revision ID: 205ce02374b3
Revises: c8363617aa07
Create Date: 2021-10-04 19:52:40.017084

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "205ce02374b3"
down_revision = "c8363617aa07"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column(
        "liquidations",
        sa.Column("received_token_address", sa.String(256), nullable=True),
    )


def downgrade():
    op.drop_column("liquidations", "received_token_address")

@@ -1,39 +0,0 @@
"""Create swaps table

Revision ID: 2116e2f36a19
Revises: c5da44eb072c
Create Date: 2021-08-05 21:06:33.340456

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "2116e2f36a19"
down_revision = "c5da44eb072c"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "swaps",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("abi_name", sa.String(1024), nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("protocol", sa.String(256), nullable=True),
        sa.Column("pool_address", sa.String(256), nullable=False),
        sa.Column("from_address", sa.String(256), nullable=False),
        sa.Column("to_address", sa.String(256), nullable=False),
        sa.Column("token_in_address", sa.String(256), nullable=False),
        sa.Column("token_in_amount", sa.Numeric, nullable=False),
        sa.Column("token_out_address", sa.String(256), nullable=False),
        sa.Column("token_out_amount", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("swaps")

@@ -1,28 +0,0 @@
"""Add blocks table

Revision ID: 2c90b2b8a80b
Revises: 04a3bb3740c3
Create Date: 2021-11-17 18:29:13.065944

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "2c90b2b8a80b"
down_revision = "04a3bb3740c3"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "blocks",
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("block_timestamp", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("block_number"),
    )


def downgrade():
    op.drop_table("blocks")

@@ -1,22 +0,0 @@
"""Add index on block_number for miner_payments

Revision ID: 320e56b0a99f
Revises: a02f3f2c469f
Create Date: 2021-09-14 11:11:41.559137

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "320e56b0a99f"
down_revision = "a02f3f2c469f"
branch_labels = None
depends_on = None


def upgrade():
    op.create_index("ix_block_number", "miner_payments", ["block_number"])


def downgrade():
    op.drop_index("ix_block_number", "miner_payments")

@@ -1,45 +0,0 @@
"""Change swap primary key to include block number

Revision ID: 3417f49d97b3
Revises: 205ce02374b3
Create Date: 2021-11-02 20:50:32.854996

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "3417f49d97b3"
down_revision = "205ce02374b3"
branch_labels = None
depends_on = None


def upgrade():
    op.execute("ALTER TABLE swaps DROP CONSTRAINT swaps_pkey CASCADE")
    op.create_primary_key(
        "swaps_pkey",
        "swaps",
        ["block_number", "transaction_hash", "trace_address"],
    )
    op.create_index(
        "arbitrage_swaps_swaps_idx",
        "arbitrage_swaps",
        ["swap_transaction_hash", "swap_trace_address"],
    )


def downgrade():
    op.drop_index("arbitrage_swaps_swaps_idx")
    op.execute("ALTER TABLE swaps DROP CONSTRAINT swaps_pkey CASCADE")
    op.create_primary_key(
        "swaps_pkey",
        "swaps",
        ["transaction_hash", "trace_address"],
    )
    op.create_foreign_key(
        "arbitrage_swaps_swaps_fkey",
        "arbitrage_swaps",
        "swaps",
        ["swap_transaction_hash", "swap_trace_address"],
        ["transaction_hash", "trace_address"],
    )

@@ -1,40 +0,0 @@
"""Create NFT Trades table

Revision ID: 3c54832385e3
Revises: 4b9d289f2d74
Create Date: 2021-12-19 22:50:28.936516

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "3c54832385e3"
down_revision = "4b9d289f2d74"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "nft_trades",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("abi_name", sa.String(1024), nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("transaction_position", sa.Numeric, nullable=False),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("protocol", sa.String(256), nullable=False),
        sa.Column("error", sa.String(256), nullable=True),
        sa.Column("seller_address", sa.String(256), nullable=False),
        sa.Column("buyer_address", sa.String(256), nullable=False),
        sa.Column("payment_token_address", sa.String(256), nullable=False),
        sa.Column("payment_amount", sa.Numeric, nullable=False),
        sa.Column("collection_address", sa.String(256), nullable=False),
        sa.Column("token_id", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("nft_trades")

@@ -1,23 +0,0 @@
"""Add error column to liquidations

Revision ID: 4b9d289f2d74
Revises: 99d376cb93cc
Create Date: 2021-12-23 14:54:28.406159

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "4b9d289f2d74"
down_revision = "99d376cb93cc"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column("liquidations", sa.Column("error", sa.String(256), nullable=True))


def downgrade():
    op.drop_column("liquidations", "error")

@@ -1,33 +0,0 @@
"""empty message

Revision ID: 52d75a7e0533
Revises: 7cf0eeb41da0
Create Date: 2021-11-26 20:35:58.954138

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "52d75a7e0533"
down_revision = "7cf0eeb41da0"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "punk_bid_acceptances",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("from_address", sa.String(256), nullable=False),
        sa.Column("punk_index", sa.Numeric, nullable=False),
        sa.Column("min_price", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("block_number", "transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("punk_bid_acceptances")
@@ -1,46 +0,0 @@
"""Change transfers trace address to ARRAY

Revision ID: 5427d62a2cc0
Revises: d540242ae368
Create Date: 2021-11-19 13:25:11.252774

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "5427d62a2cc0"
down_revision = "d540242ae368"
branch_labels = None
depends_on = None


def upgrade():
    op.drop_constraint("transfers_pkey", "transfers")
    op.alter_column(
        "transfers",
        "trace_address",
        type_=sa.ARRAY(sa.Integer),
        nullable=False,
        postgresql_using="trace_address::int[]",
    )
    op.create_primary_key(
        "transfers_pkey",
        "transfers",
        ["block_number", "transaction_hash", "trace_address"],
    )


def downgrade():
    op.drop_constraint("transfers_pkey", "transfers")
    op.alter_column(
        "transfers",
        "trace_address",
        type_=sa.String(256),
        nullable=False,
    )
    op.create_primary_key(
        "transfers_pkey",
        "transfers",
        ["block_number", "transaction_hash", "trace_address"],
    )
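The postgresql_using="trace_address::int[]" clause is what lets Postgres cast the existing string data in place. The motivation for the type change is that string trace addresses (e.g. comma-joined) compare lexicographically, which breaks ordering once an element has two digits:

# Why ARRAY(Integer) instead of String for trace addresses:
assert "0,10" < "0,2"    # as strings, trace [0, 10] wrongly sorts before [0, 2]
assert [0, 10] > [0, 2]  # integer arrays compare element-wise, as intended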
@@ -1,32 +0,0 @@
"""Add block_number to nft_trades primary key

Revision ID: 5c5375de15fd
Revises: e616420acd18
Create Date: 2022-01-21 15:27:57.790340

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "5c5375de15fd"
down_revision = "e616420acd18"
branch_labels = None
depends_on = None


def upgrade():
    op.execute("ALTER TABLE nft_trades DROP CONSTRAINT nft_trades_pkey")
    op.create_primary_key(
        "nft_trades_pkey",
        "nft_trades",
        ["block_number", "transaction_hash", "trace_address"],
    )


def downgrade():
    op.execute("ALTER TABLE nft_trades DROP CONSTRAINT nft_trades_pkey")
    op.create_primary_key(
        "nft_trades_pkey",
        "nft_trades",
        ["transaction_hash", "trace_address"],
    )
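Leading the primary key with block_number matches the write pattern: when a block is re-inspected, everything for that block is deleted and rewritten. A hedged sketch of the access pattern this key serves; `session` is an assumed SQLAlchemy session on the inspect database:

# Illustrative only: with block_number first in the key, this delete is an
# index-backed range scan rather than a full table scan.
from sqlalchemy import text

session.execute(
    text("DELETE FROM nft_trades WHERE block_number = :block_number"),
    {"block_number": 13_000_000},
)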
@@ -1,22 +0,0 @@
"""Make gross profit nullable on summary

Revision ID: 630783c18a93
Revises: ab9a9e449ff9
Create Date: 2022-01-19 23:09:51.816948

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "630783c18a93"
down_revision = "ab9a9e449ff9"
branch_labels = None
depends_on = None


def upgrade():
    op.alter_column("mev_summary", "gross_profit_usd", nullable=True)


def downgrade():
    op.alter_column("mev_summary", "gross_profit_usd", nullable=False)
@@ -1,33 +0,0 @@
"""empty message

Revision ID: 7cf0eeb41da0
Revises: d498bdb0a641
Create Date: 2021-11-26 20:27:28.936516

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "7cf0eeb41da0"
down_revision = "d498bdb0a641"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "punk_bids",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("from_address", sa.String(256), nullable=False),
        sa.Column("punk_index", sa.Numeric, nullable=False),
        sa.Column("price", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("block_number", "transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("punk_bids")
@@ -1,49 +0,0 @@
"""Change trace addresses to array types

Revision ID: 7eec417a4f3e
Revises: 9d8c69b3dccb
Create Date: 2021-08-06 15:58:04.556762

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "7eec417a4f3e"
down_revision = "9d8c69b3dccb"
branch_labels = None
depends_on = None


def upgrade():
    op.drop_constraint("swaps_pkey", "swaps")
    op.drop_column("swaps", "trace_address")
    op.add_column("swaps", sa.Column("trace_address", sa.ARRAY(sa.Integer)))
    op.create_primary_key("swaps_pkey", "swaps", ["transaction_hash", "trace_address"])

    op.drop_constraint("classified_traces_pkey", "classified_traces")
    op.drop_column("classified_traces", "trace_address")
    op.add_column("classified_traces", sa.Column("trace_address", sa.ARRAY(sa.Integer)))
    op.create_primary_key(
        "classified_traces_pkey",
        "classified_traces",
        ["transaction_hash", "trace_address"],
    )


def downgrade():
    op.drop_constraint("swaps_pkey", "swaps")
    op.drop_column("swaps", "trace_address")
    op.add_column("swaps", sa.Column("trace_address", sa.String))

    op.create_primary_key("swaps_pkey", "swaps", ["transaction_hash", "trace_address"])

    op.drop_constraint("classified_traces_pkey", "classified_traces")
    op.drop_column("classified_traces", "trace_address")
    op.add_column("classified_traces", sa.Column("trace_address", sa.String))

    op.create_primary_key(
        "classified_traces_pkey",
        "classified_traces",
        ["transaction_hash", "trace_address"],
    )
@@ -1,23 +0,0 @@
"""Add error column to swaps

Revision ID: 92f28a2b4f52
Revises: 9b8ae51c5d56
Create Date: 2021-08-17 03:46:21.498821

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "92f28a2b4f52"
down_revision = "9b8ae51c5d56"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column("swaps", sa.Column("error", sa.String(256), nullable=True))


def downgrade():
    op.drop_column("swaps", "error")
@@ -1,23 +0,0 @@
"""error column

Revision ID: 99d376cb93cc
Revises: c4a7620a2d33
Create Date: 2021-12-21 21:26:12.142484

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "99d376cb93cc"
down_revision = "c4a7620a2d33"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column("arbitrages", sa.Column("error", sa.String(256), nullable=True))


def downgrade():
    op.drop_column("arbitrages", "error")
@@ -1,37 +0,0 @@
"""Add swap arbitrage join table

Revision ID: 9b8ae51c5d56
Revises: 7eec417a4f3e
Create Date: 2021-08-06 17:06:55.364516

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "9b8ae51c5d56"
down_revision = "7eec417a4f3e"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "arbitrage_swaps",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("arbitrage_id", sa.String(1024), primary_key=True),
        sa.Column("swap_transaction_hash", sa.String(66), primary_key=True),
        sa.Column("swap_trace_address", sa.ARRAY(sa.Integer), primary_key=True),
        sa.ForeignKeyConstraint(
            ["arbitrage_id"], ["arbitrages.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(
            ["swap_transaction_hash", "swap_trace_address"],
            ["swaps.transaction_hash", "swaps.trace_address"],
            ondelete="CASCADE",
        ),
    )


def downgrade():
    op.drop_table("arbitrage_swaps")
@@ -1,34 +0,0 @@
"""Add arbitrages and swap join table

Revision ID: 9d8c69b3dccb
Revises: 2116e2f36a19
Create Date: 2021-08-05 21:46:35.209199

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "9d8c69b3dccb"
down_revision = "2116e2f36a19"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "arbitrages",
        sa.Column("id", sa.String(256), primary_key=True),
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("account_address", sa.String(256), nullable=False),
        sa.Column("profit_token_address", sa.String(256), nullable=False),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(256), nullable=False),
        sa.Column("start_amount", sa.Numeric, nullable=False),
        sa.Column("end_amount", sa.Numeric, nullable=False),
        sa.Column("profit_amount", sa.Numeric, nullable=False),
    )


def downgrade():
    op.drop_table("arbitrages")
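Together with the arbitrage_swaps table above, this gives the pattern for recovering which swaps made up an arbitrage. An illustrative query, not part of the diff; `session` is assumed:

# Sketch: list the swaps behind each arbitrage via the join table.
from sqlalchemy import text

rows = session.execute(
    text(
        """
        SELECT a.id, a.profit_amount, s.transaction_hash, s.trace_address
        FROM arbitrages a
        JOIN arbitrage_swaps aws ON aws.arbitrage_id = a.id
        JOIN swaps s
          ON s.transaction_hash = aws.swap_transaction_hash
         AND s.trace_address = aws.swap_trace_address
        """
    )
)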
@@ -1,27 +0,0 @@
"""Create latest block table

Revision ID: a02f3f2c469f
Revises: d70c08b4db6f
Create Date: 2021-09-13 21:32:27.181344

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "a02f3f2c469f"
down_revision = "d70c08b4db6f"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "latest_block_update",
        sa.Column("block_number", sa.Numeric, primary_key=True),
        sa.Column("updated_at", sa.TIMESTAMP, server_default=sa.func.now()),
    )


def downgrade():
    op.drop_table("latest_block_update")
@@ -1,34 +0,0 @@
"""Change classified traces primary key to include block number

Revision ID: a10d68643476
Revises: 3417f49d97b3
Create Date: 2021-11-02 22:03:26.312317

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "a10d68643476"
down_revision = "3417f49d97b3"
branch_labels = None
depends_on = None


def upgrade():
    op.execute("ALTER TABLE classified_traces DROP CONSTRAINT classified_traces_pkey")
    op.create_primary_key(
        "classified_traces_pkey",
        "classified_traces",
        ["block_number", "transaction_hash", "trace_address"],
    )
    op.drop_index("i_block_number")


def downgrade():
    op.execute("ALTER TABLE classified_traces DROP CONSTRAINT classified_traces_pkey")
    op.create_index("i_block_number", "classified_traces", ["block_number"])
    op.create_primary_key(
        "classified_traces_pkey",
        "classified_traces",
        ["transaction_hash", "trace_address"],
    )
@@ -1,40 +0,0 @@
"""Create mev_summary table

Revision ID: ab9a9e449ff9
Revises: b26ab0051a88
Create Date: 2022-01-18 18:36:42.865154

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "ab9a9e449ff9"
down_revision = "b26ab0051a88"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "mev_summary",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("block_timestamp", sa.TIMESTAMP, nullable=False),
        sa.Column("protocol", sa.String(256), nullable=True),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("type", sa.String(256), nullable=False),
        sa.Column("gross_profit_usd", sa.Numeric, nullable=False),
        sa.Column("miner_payment_usd", sa.Numeric, nullable=False),
        sa.Column("gas_used", sa.Numeric, nullable=False),
        sa.Column("gas_price", sa.Numeric, nullable=False),
        sa.Column("coinbase_transfer", sa.Numeric, nullable=False),
        sa.Column("gas_price_with_coinbase_transfer", sa.Numeric, nullable=False),
        sa.Column("miner_address", sa.String(256), nullable=False),
        sa.Column("base_fee_per_gas", sa.Numeric, nullable=False),
        sa.Column("error", sa.String(256), nullable=True),
    )


def downgrade():
    op.drop_table("mev_summary")
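The table stores both gas_price and gas_price_with_coinbase_transfer. The migration itself does not define how the latter is derived; one plausible reading, treating the direct coinbase transfer as extra payment amortized over the gas used, would be:

# Assumption, not taken from this diff: an effective gas price that folds
# the coinbase transfer into the per-gas payment.
def effective_gas_price(gas_used: int, gas_price: int, coinbase_transfer: int) -> float:
    return (gas_used * gas_price + coinbase_transfer) / gas_used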
@@ -1,27 +0,0 @@
"""add profit_amount column to sandwiches table

Revision ID: b26ab0051a88
Revises: 3c54832385e3
Create Date: 2022-01-16 13:45:10.190969

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "b26ab0051a88"
down_revision = "3c54832385e3"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column(
        "sandwiches", sa.Column("profit_token_address", sa.String(256), nullable=True)
    )
    op.add_column("sandwiches", sa.Column("profit_amount", sa.Numeric, nullable=True))


def downgrade():
    op.drop_column("sandwiches", "profit_token_address")
    op.drop_column("sandwiches", "profit_amount")
@@ -1,26 +0,0 @@
"""Remove collateral_token_address column

Revision ID: b9fa1ecc9929
Revises: 04b76ab1d2af
Create Date: 2021-12-01 23:32:40.574108

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "b9fa1ecc9929"
down_revision = "04b76ab1d2af"
branch_labels = None
depends_on = None


def upgrade():
    op.drop_column("liquidations", "collateral_token_address")


def downgrade():
    op.add_column(
        "liquidations",
        sa.Column("collateral_token_address", sa.String(256), nullable=False),
    )
@@ -1,40 +0,0 @@
"""Add tokens to database

Revision ID: bba80d21c5a4
Revises: 630783c18a93
Create Date: 2022-01-19 22:19:59.514998

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "bba80d21c5a4"
down_revision = "630783c18a93"
branch_labels = None
depends_on = None


def upgrade():
    op.execute(
        """
        INSERT INTO tokens (token_address,decimals) VALUES
        ('0x514910771af9ca656af840dff83e8264ecf986ca',18),
        ('0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2',18),
        ('0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee',18),
        ('0x0bc529c00c6401aef6d220be8c6ea1667f6ad93e',18),
        ('0x5d3a536e4d6dbd6114cc1ead35777bab948e3643',8),
        ('0x2260fac5e5542a773aa44fbcfedf7c193bc2c599',8),
        ('0x80fb784b7ed66730e8b1dbd9820afd29931aab03',18),
        ('0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5',8),
        ('0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48',6),
        ('0xdac17f958d2ee523a2206206994597c13d831ec7',6),
        ('0x6b175474e89094c44da98b954eedeac495271d0f',18),
        ('0x0000000000085d4780b73119b644ae5ecd22b376',18),
        ('0x39aa39c021dfbae8fac545936693ac917d5e7563',8),
        ('0x7fc66500c84a76ad7e9c93437bfc5ac33e2ddae9',18);
        """
    )


def downgrade():
    op.execute("DELETE FROM tokens")
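The decimals column seeded above is what converts raw on-chain integer amounts into human-readable token quantities. A small sketch of that conversion:

from decimal import Decimal

def to_token_amount(raw_amount: int, decimals: int) -> Decimal:
    # e.g. 1_500_000 raw units of a 6-decimal token (USDC above) -> 1.5
    return Decimal(raw_amount) / Decimal(10) ** decimals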
@@ -1,26 +0,0 @@
"""Add protocols column to arbitrages

Revision ID: bdbb545f6c03
Revises: bba80d21c5a4
Create Date: 2022-01-20 23:17:19.316008

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "bdbb545f6c03"
down_revision = "bba80d21c5a4"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column(
        "arbitrages",
        sa.Column("protocols", sa.ARRAY(sa.String(256)), server_default="{}"),
    )


def downgrade():
    op.drop_column("arbitrages", "protocols")
@@ -1,28 +0,0 @@
"""Create tokens table

Revision ID: c4a7620a2d33
Revises: 15ba9c27ee8a
Create Date: 2021-12-21 19:12:33.940117

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "c4a7620a2d33"
down_revision = "15ba9c27ee8a"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "tokens",
        sa.Column("token_address", sa.String(256), nullable=False),
        sa.Column("decimals", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("token_address"),
    )


def downgrade():
    op.drop_table("tokens")
@@ -1,22 +0,0 @@
"""Add index for classified_traces.block_number

Revision ID: c5da44eb072c
Revises: 0660432b9840
Create Date: 2021-07-30 17:37:27.335475

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "c5da44eb072c"
down_revision = "0660432b9840"
branch_labels = None
depends_on = None


def upgrade():
    op.create_index("i_block_number", "classified_traces", ["block_number"])


def downgrade():
    op.drop_index("i_block_number", "classified_traces")
@@ -1,37 +0,0 @@
"""Create liquidations table

Revision ID: c8363617aa07
Revises: cd96af55108e
Create Date: 2021-09-29 14:00:06.857103

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "c8363617aa07"
down_revision = "cd96af55108e"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "liquidations",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("liquidated_user", sa.String(256), nullable=False),
        sa.Column("liquidator_user", sa.String(256), nullable=False),
        sa.Column("collateral_token_address", sa.String(256), nullable=False),
        sa.Column("debt_token_address", sa.String(256), nullable=False),
        sa.Column("debt_purchase_amount", sa.Numeric, nullable=False),
        sa.Column("received_amount", sa.Numeric, nullable=False),
        sa.Column("protocol", sa.String(256), nullable=True),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.PrimaryKeyConstraint("transaction_hash", "trace_address"),
    )


def downgrade():
    op.drop_table("liquidations")
@@ -1,38 +0,0 @@
"""Add transfers table

Revision ID: cd96af55108e
Revises: 320e56b0a99f
Create Date: 2021-09-17 12:44:45.245137

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "cd96af55108e"
down_revision = "320e56b0a99f"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "transfers",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("transaction_hash", sa.String(66), nullable=False),
        sa.Column("trace_address", sa.String(256), nullable=False),
        sa.Column("protocol", sa.String(256), nullable=True),
        sa.Column("from_address", sa.String(256), nullable=False),
        sa.Column("to_address", sa.String(256), nullable=False),
        sa.Column("token_address", sa.String(256), nullable=False),
        sa.Column("amount", sa.Numeric, nullable=False),
        sa.Column("error", sa.String(256), nullable=True),
        sa.PrimaryKeyConstraint("transaction_hash", "trace_address"),
    )
    op.create_index("ix_transfers_block_number", "transfers", ["block_number"])


def downgrade():
    op.drop_index("ix_transfers_block_number", "transfers")
    op.drop_table("transfers")
@@ -1,29 +0,0 @@
"""Create usd_prices table

Revision ID: d540242ae368
Revises: 2c90b2b8a80b
Create Date: 2021-11-18 04:30:06.802857

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "d540242ae368"
down_revision = "2c90b2b8a80b"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "prices",
        sa.Column("timestamp", sa.TIMESTAMP),
        sa.Column("usd_price", sa.Numeric, nullable=False),
        sa.Column("token_address", sa.String(256), nullable=False),
        sa.PrimaryKeyConstraint("token_address", "timestamp"),
    )


def downgrade():
    op.drop_table("prices")
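With (token_address, timestamp) as the primary key, the natural lookup is "latest price at or before a given moment". An illustrative query, with `session`, `token_address`, and `block_timestamp` assumed:

# Sketch only: most recent price for a token at or before a timestamp.
from sqlalchemy import text

row = session.execute(
    text(
        "SELECT usd_price FROM prices "
        "WHERE token_address = :token AND timestamp <= :ts "
        "ORDER BY timestamp DESC LIMIT 1"
    ),
    {"token": token_address, "ts": block_timestamp},
).first()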
@@ -1,31 +0,0 @@
"""Add to_address and from_address to miner_payments table

Revision ID: d70c08b4db6f
Revises: 083978d6e455
Create Date: 2021-08-30 22:10:04.186251

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "d70c08b4db6f"
down_revision = "083978d6e455"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column(
        "miner_payments",
        sa.Column("transaction_to_address", sa.String(256), nullable=True),
    )
    op.add_column(
        "miner_payments",
        sa.Column("transaction_from_address", sa.String(256), nullable=True),
    )


def downgrade():
    op.drop_column("miner_payments", "transaction_to_address")
    op.drop_column("miner_payments", "transaction_from_address")
@@ -1,26 +0,0 @@
"""Add protocols column to mev_summary

Revision ID: e616420acd18
Revises: bdbb545f6c03
Create Date: 2022-01-21 00:11:51.516459

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "e616420acd18"
down_revision = "bdbb545f6c03"
branch_labels = None
depends_on = None


def upgrade():
    op.add_column(
        "mev_summary",
        sa.Column("protocols", sa.ARRAY(sa.String(256)), server_default="{}"),
    )


def downgrade():
    op.drop_column("mev_summary", "protocols")
@@ -1,69 +0,0 @@
"""Create sandwiches and sandwiched swaps tables

Revision ID: ead7eb8283b9
Revises: 52d75a7e0533
Create Date: 2021-12-03 16:37:28.077158

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "ead7eb8283b9"
down_revision = "52d75a7e0533"
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "sandwiches",
        sa.Column("id", sa.String(256), primary_key=True),
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("block_number", sa.Numeric, nullable=False),
        sa.Column("sandwicher_address", sa.String(256), nullable=False),
        sa.Column("frontrun_swap_transaction_hash", sa.String(256), nullable=False),
        sa.Column("frontrun_swap_trace_address", sa.ARRAY(sa.Integer), nullable=False),
        sa.Column("backrun_swap_transaction_hash", sa.String(256), nullable=False),
        sa.Column("backrun_swap_trace_address", sa.ARRAY(sa.Integer), nullable=False),
    )

    op.create_index(
        "ik_sandwiches_frontrun",
        "sandwiches",
        [
            "block_number",
            "frontrun_swap_transaction_hash",
            "frontrun_swap_trace_address",
        ],
    )

    op.create_index(
        "ik_sandwiches_backrun",
        "sandwiches",
        ["block_number", "backrun_swap_transaction_hash", "backrun_swap_trace_address"],
    )

    op.create_table(
        "sandwiched_swaps",
        sa.Column("created_at", sa.TIMESTAMP, server_default=sa.func.now()),
        sa.Column("sandwich_id", sa.String(1024), primary_key=True),
        sa.Column("block_number", sa.Numeric, primary_key=True),
        sa.Column("transaction_hash", sa.String(66), primary_key=True),
        sa.Column("trace_address", sa.ARRAY(sa.Integer), primary_key=True),
        sa.ForeignKeyConstraint(["sandwich_id"], ["sandwiches.id"], ondelete="CASCADE"),
    )

    op.create_index(
        "ik_sandwiched_swaps_secondary",
        "sandwiched_swaps",
        ["block_number", "transaction_hash", "trace_address"],
    )


def downgrade():
    op.drop_index("ik_sandwiched_swaps_secondary")
    op.drop_table("sandwiched_swaps")
    op.drop_index("ik_sandwiches_frontrun")
    op.drop_index("ik_sandwiches_backrun")
    op.drop_table("sandwiches")
219 cli.py
@@ -1,219 +0,0 @@
import fileinput
import logging
import os
import sys
from datetime import datetime

import click
import dramatiq

from mev_inspect.concurrency import coro
from mev_inspect.crud.prices import write_prices
from mev_inspect.db import get_inspect_session, get_trace_session
from mev_inspect.inspector import MEVInspector
from mev_inspect.prices import fetch_prices, fetch_prices_range
from mev_inspect.queue.broker import connect_broker
from mev_inspect.queue.tasks import (
    LOW_PRIORITY,
    LOW_PRIORITY_QUEUE,
    backfill_export_task,
    inspect_many_blocks_task,
)
from mev_inspect.s3_export import export_block

RPC_URL_ENV = "RPC_URL"

logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logger = logging.getLogger(__name__)


@click.group()
def cli():
    pass


@cli.command()
@click.argument("block_number", type=int)
@click.option("--rpc", default=lambda: os.environ.get(RPC_URL_ENV, ""))
@coro
async def inspect_block_command(block_number: int, rpc: str):
    inspect_db_session = get_inspect_session()
    trace_db_session = get_trace_session()

    inspector = MEVInspector(rpc)

    await inspector.inspect_single_block(
        inspect_db_session=inspect_db_session,
        trace_db_session=trace_db_session,
        block=block_number,
    )


@cli.command()
@click.argument("block_number", type=int)
@click.option("--rpc", default=lambda: os.environ.get(RPC_URL_ENV, ""))
@coro
async def fetch_block_command(block_number: int, rpc: str):
    trace_db_session = get_trace_session()

    inspector = MEVInspector(rpc)
    block = await inspector.create_from_block(
        block_number=block_number,
        trace_db_session=trace_db_session,
    )

    print(block.json())


@cli.command()
@click.argument("after_block", type=int)
@click.argument("before_block", type=int)
@click.option("--rpc", default=lambda: os.environ.get(RPC_URL_ENV, ""))
@click.option(
    "--max-concurrency",
    type=int,
    help="maximum number of concurrent connections",
    default=5,
)
@click.option(
    "--request-timeout", type=int, help="timeout for requests to nodes", default=500
)
@coro
async def inspect_many_blocks_command(
    after_block: int,
    before_block: int,
    rpc: str,
    max_concurrency: int,
    request_timeout: int,
):
    inspect_db_session = get_inspect_session()
    trace_db_session = get_trace_session()

    inspector = MEVInspector(
        rpc,
        max_concurrency=max_concurrency,
        request_timeout=request_timeout,
    )
    await inspector.inspect_many_blocks(
        inspect_db_session=inspect_db_session,
        trace_db_session=trace_db_session,
        after_block=after_block,
        before_block=before_block,
    )


@cli.command()
def enqueue_block_list_command():
    broker = connect_broker()
    inspect_many_blocks_actor = dramatiq.actor(
        inspect_many_blocks_task,
        broker=broker,
        queue_name=LOW_PRIORITY_QUEUE,
        priority=LOW_PRIORITY,
    )

    for block_string in fileinput.input():
        block = int(block_string)
        logger.info(f"Sending {block} to {block+1}")
        inspect_many_blocks_actor.send(block, block + 1)


@cli.command()
@click.argument("start_block", type=int)
@click.argument("end_block", type=int)
@click.argument("batch_size", type=int, default=10)
def enqueue_many_blocks_command(start_block: int, end_block: int, batch_size: int):
    broker = connect_broker()
    inspect_many_blocks_actor = dramatiq.actor(
        inspect_many_blocks_task,
        broker=broker,
        queue_name=LOW_PRIORITY_QUEUE,
        priority=LOW_PRIORITY,
    )

    if start_block < end_block:
        after_block = start_block
        before_block = end_block

        for batch_after_block in range(after_block, before_block, batch_size):
            batch_before_block = min(batch_after_block + batch_size, before_block)
            logger.info(f"Sending {batch_after_block} to {batch_before_block}")
            inspect_many_blocks_actor.send(batch_after_block, batch_before_block)
    else:
        after_block = end_block
        before_block = start_block

        for batch_before_block in range(before_block, after_block, -1 * batch_size):
            batch_after_block = max(batch_before_block - batch_size, after_block)
            logger.info(f"Sending {batch_after_block} to {batch_before_block}")
            inspect_many_blocks_actor.send(batch_after_block, batch_before_block)


@cli.command()
def fetch_all_prices():
    inspect_db_session = get_inspect_session()

    logger.info("Fetching prices")
    prices = fetch_prices()

    logger.info("Writing prices")
    write_prices(inspect_db_session, prices)


@cli.command()
@click.argument("block_number", type=int)
def enqueue_s3_export(block_number: int):
    broker = connect_broker()
    export_actor = dramatiq.actor(
        backfill_export_task,
        broker=broker,
        queue_name=LOW_PRIORITY_QUEUE,
        priority=LOW_PRIORITY,
    )
    logger.info(f"Sending block {block_number} export to queue")
    export_actor.send(block_number)


@cli.command()
@click.argument("after_block", type=int)
@click.argument("before_block", type=int)
def enqueue_many_s3_exports(after_block: int, before_block: int):
    broker = connect_broker()
    export_actor = dramatiq.actor(
        backfill_export_task,
        broker=broker,
        queue_name=LOW_PRIORITY_QUEUE,
        priority=LOW_PRIORITY,
    )
    logger.info(f"Sending blocks {after_block} to {before_block} to queue")
    for block_number in range(after_block, before_block):
        export_actor.send(block_number)


@cli.command()
@click.argument("block_number", type=int)
def s3_export(block_number: int):
    inspect_db_session = get_inspect_session()
    logger.info(f"Exporting {block_number}")
    export_block(inspect_db_session, block_number)


@cli.command()
@click.argument("after", type=click.DateTime(formats=["%Y-%m-%d", "%m-%d-%Y"]))
@click.argument("before", type=click.DateTime(formats=["%Y-%m-%d", "%m-%d-%Y"]))
def fetch_range(after: datetime, before: datetime):
    inspect_db_session = get_inspect_session()

    logger.info("Fetching prices")
    prices = fetch_prices_range(after, before)

    logger.info("Writing prices")
    write_prices(inspect_db_session, prices)


def get_rpc_url() -> str:
    return os.environ["RPC_URL"]


if __name__ == "__main__":
    cli()
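The batching in enqueue_many_blocks_command walks the range in either direction depending on whether start_block is below end_block. A quick standalone illustration of the forward case:

# Forward batching for start_block=100, end_block=125, batch_size=10:
# the actor is sent (100, 110), (110, 120), (120, 125).
for batch_after in range(100, 125, 10):
    batch_before = min(batch_after + 10, 125)
    print(batch_after, batch_before)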
33 docker-compose.yml Normal file
@@ -0,0 +1,33 @@
services:
  mev-inspect:
    build: .
    depends_on:
      - db
    env_file:
      - .env
    volumes:
      - .:/app
    tty: true

  db:
    image: postgres:12
    volumes:
      - mev-inspect-db-data:/var/lib/postgresql/data/pgdata
    env_file:
      - .env
    environment:
      - PGDATA=/var/lib/postgresql/data/pgdata

  pgadmin:
    image: dpage/pgadmin4
    networks:
      - default
    depends_on:
      - db
    env_file:
      - .env
    ports:
      - "5050:5050"

volumes:
  mev-inspect-db-data:
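Since both containers read the same .env file, application code can build its database URL from the POSTGRES_* variables; `db` is the compose service name above. A minimal sketch, where the exact variable names are assumptions about what .env provides:

import os

# Sketch only; falls back to the compose service name as the host.
db_url = (
    "postgresql://"
    f"{os.environ['POSTGRES_USER']}:{os.environ['POSTGRES_PASSWORD']}"
    f"@{os.environ.get('POSTGRES_HOST', 'db')}/{os.environ.get('POSTGRES_DB', 'mev_inspect')}"
)
print(db_url)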
41 examples/uniswap_inspect.py Normal file
@@ -0,0 +1,41 @@
import argparse

from web3 import Web3

from mev_inspect import block
from mev_inspect.inspectors.uniswap import UniswapInspector
from mev_inspect.processor import Processor

parser = argparse.ArgumentParser(description="Inspect some blocks.")
parser.add_argument(
    "-block_number",
    metavar="b",
    type=int,
    nargs="+",
    help="the block number you are targeting; eventually this will need to be changed",
)
parser.add_argument(
    "-rpc", metavar="r", help="rpc endpoint, this needs to have parity style traces"
)
args = parser.parse_args()

## Set up the base provider, but don't wrap it in web3 so we can make requests to it with make_request()
base_provider = Web3.HTTPProvider(args.rpc)

## Get block data that we need
block_data = block.create_from_block_number(args.block_number[0], base_provider)
print(f"Total traces: {len(block_data.traces)}")

total_transactions = len(
    set(t.transaction_hash for t in block_data.traces if t.transaction_hash is not None)
)
print(f"Total transactions: {total_transactions}")

## Build a Uniswap inspector
uniswap_inspector = UniswapInspector(base_provider)

## Create a processor, pass in an ARRAY of inspectors
processor = Processor([uniswap_inspector, uniswap_inspector])

classifications = processor.get_transaction_evaluations(block_data)
print(f"Returned {len(classifications)} classifications")
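A plausible invocation of the example, with placeholder block number and endpoint values (the endpoint must serve parity-style traces, as the help text notes):

# From a shell (illustrative values only):
#   python examples/uniswap_inspect.py -block_number 11931271 -rpc http://localhost:8545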
@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
@@ -1,24 +0,0 @@
apiVersion: v2
name: mev-inspect-prices
description: A Helm chart for Kubernetes

# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application

# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"
@@ -1,62 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "mev-inspect-prices.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "mev-inspect-prices.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}

{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "mev-inspect-prices.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Common labels
*/}}
{{- define "mev-inspect-prices.labels" -}}
helm.sh/chart: {{ include "mev-inspect-prices.chart" . }}
{{ include "mev-inspect-prices.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*
Selector labels
*/}}
{{- define "mev-inspect-prices.selectorLabels" -}}
app.kubernetes.io/name: {{ include "mev-inspect-prices.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*
Create the name of the service account to use
*/}}
{{- define "mev-inspect-prices.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "mev-inspect-prices.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}
@@ -1,35 +0,0 @@
apiVersion: batch/v1
kind: CronJob
metadata:
  name: {{ include "mev-inspect-prices.fullname" . }}
spec:
  schedule: "0 */1 * * *"
  successfulJobsHistoryLimit: 0
  jobTemplate:
    spec:
      template:
        spec:
          containers:
            - name: {{ .Chart.Name }}
              image: "{{ .Values.image.repository }}"
              imagePullPolicy: {{ .Values.image.pullPolicy }}
              args:
                - run
                - fetch-all-prices
              env:
                - name: POSTGRES_HOST
                  valueFrom:
                    secretKeyRef:
                      name: mev-inspect-db-credentials
                      key: host
                - name: POSTGRES_USER
                  valueFrom:
                    secretKeyRef:
                      name: mev-inspect-db-credentials
                      key: username
                - name: POSTGRES_PASSWORD
                  valueFrom:
                    secretKeyRef:
                      name: mev-inspect-db-credentials
                      key: password
          restartPolicy: Never
@@ -1,7 +0,0 @@
image:
  repository: mev-inspect-py
  pullPolicy: IfNotPresent

imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
@@ -1,24 +0,0 @@
apiVersion: v2
name: mev-inspect-workers
description: A Helm chart for Kubernetes

# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application

# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"
@@ -1,62 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "mev-inspect-worker.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "mev-inspect-worker.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}

{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "mev-inspect-worker.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Common labels
*/}}
{{- define "mev-inspect-worker.labels" -}}
helm.sh/chart: {{ include "mev-inspect-worker.chart" . }}
{{ include "mev-inspect-worker.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*
Selector labels
*/}}
{{- define "mev-inspect-worker.selectorLabels" -}}
app.kubernetes.io/name: {{ include "mev-inspect-worker.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*
Create the name of the service account to use
*/}}
{{- define "mev-inspect-worker.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "mev-inspect-worker.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}
@@ -1,133 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "mev-inspect-worker.fullname" . }}
  labels:
    {{- include "mev-inspect-worker.labels" . | nindent 4 }}
spec:
  replicas: {{ .Values.replicaCount }}
  selector:
    matchLabels:
      {{- include "mev-inspect-worker.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      {{- with .Values.podAnnotations }}
      annotations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      labels:
        {{- include "mev-inspect-worker.selectorLabels" . | nindent 8 }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          args: ["run", "dramatiq", "worker", "--threads=1", "--processes=1"]
          livenessProbe:
            exec:
              command:
                - ls
                - /
            initialDelaySeconds: 20
            periodSeconds: 10
            timeoutSeconds: 5
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
          env:
            - name: POSTGRES_HOST
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-db-credentials
                  key: host
            - name: POSTGRES_USER
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-db-credentials
                  key: username
            - name: POSTGRES_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-db-credentials
                  key: password
            - name: REDIS_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: redis
                  key: redis-password
            - name: TRACE_DB_HOST
              valueFrom:
                secretKeyRef:
                  name: trace-db-credentials
                  key: host
                  optional: true
            - name: TRACE_DB_USER
              valueFrom:
                secretKeyRef:
                  name: trace-db-credentials
                  key: username
                  optional: true
            - name: TRACE_DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: trace-db-credentials
                  key: password
                  optional: true
            - name: RPC_URL
              valueFrom:
                configMapKeyRef:
                  name: mev-inspect-rpc
                  key: url
            - name: LISTENER_HEALTHCHECK_URL
              valueFrom:
                configMapKeyRef:
                  name: mev-inspect-listener-healthcheck
                  key: url
                  optional: true
            - name: EXPORT_BUCKET_NAME
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-export
                  key: export-bucket-name
                  optional: true
            - name: EXPORT_BUCKET_REGION
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-export
                  key: export-bucket-region
                  optional: true
            - name: EXPORT_AWS_ACCESS_KEY_ID
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-export
                  key: export-aws-access-key-id
                  optional: true
            - name: EXPORT_AWS_SECRET_ACCESS_KEY
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-export
                  key: export-aws-secret-access-key
                  optional: true
            {{- range .Values.extraEnv }}
            - name: {{ .name }}
              value: {{ .value }}
            {{- end }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
@@ -1,45 +0,0 @@
# Default values for mev-inspect-workers
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.

replicaCount: 1

image:
  repository: mev-inspect-py:latest
  pullPolicy: IfNotPresent

imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""

podAnnotations: {}

podSecurityContext: {}
  # fsGroup: 2000

securityContext:
  allowPrivilegeEscalation: false
  capabilities:
    drop:
      - ALL
  # readOnlyRootFilesystem: true
  runAsNonRoot: true
  runAsUser: 1000

resources: {}
  # We usually recommend not to specify default resources and to leave this as a conscious
  # choice for the user. This also increases chances charts run on environments with little
  # resources, such as Minikube. If you do want to specify resources, uncomment the following
  # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
  # limits:
  #   cpu: 100m
  #   memory: 128Mi
  # requests:
  #   cpu: 100m
  #   memory: 128Mi

nodeSelector: {}

tolerations: []

affinity: {}
@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
@@ -1,24 +0,0 @@
apiVersion: v2
name: mev-inspect
description: A Helm chart for Kubernetes

# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application

# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"
@@ -1,62 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "mev-inspect.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "mev-inspect.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}

{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "mev-inspect.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Common labels
*/}}
{{- define "mev-inspect.labels" -}}
helm.sh/chart: {{ include "mev-inspect.chart" . }}
{{ include "mev-inspect.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*
Selector labels
*/}}
{{- define "mev-inspect.selectorLabels" -}}
app.kubernetes.io/name: {{ include "mev-inspect.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*
Create the name of the service account to use
*/}}
{{- define "mev-inspect.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "mev-inspect.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}
@ -1,133 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "mev-inspect.fullname" . }}
  labels:
    {{- include "mev-inspect.labels" . | nindent 4 }}
spec:
  replicas: {{ .Values.replicaCount }}
  selector:
    matchLabels:
      {{- include "mev-inspect.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      {{- with .Values.podAnnotations }}
      annotations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      labels:
        {{- include "mev-inspect.selectorLabels" . | nindent 8 }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          args: ["run", "python", "loop.py"]
          livenessProbe:
            exec:
              command:
                - ls
                - /
            initialDelaySeconds: 20
            periodSeconds: 10
            timeoutSeconds: 5
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
          env:
            - name: POSTGRES_HOST
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-db-credentials
                  key: host
            - name: POSTGRES_USER
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-db-credentials
                  key: username
            - name: POSTGRES_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-db-credentials
                  key: password
            - name: REDIS_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: redis
                  key: redis-password
            - name: TRACE_DB_HOST
              valueFrom:
                secretKeyRef:
                  name: trace-db-credentials
                  key: host
                  optional: true
            - name: TRACE_DB_USER
              valueFrom:
                secretKeyRef:
                  name: trace-db-credentials
                  key: username
                  optional: true
            - name: TRACE_DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: trace-db-credentials
                  key: password
                  optional: true
            - name: RPC_URL
              valueFrom:
                configMapKeyRef:
                  name: mev-inspect-rpc
                  key: url
            - name: LISTENER_HEALTHCHECK_URL
              valueFrom:
                configMapKeyRef:
                  name: mev-inspect-listener-healthcheck
                  key: url
                  optional: true
            - name: EXPORT_BUCKET_NAME
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-export
                  key: export-bucket-name
                  optional: true
            - name: EXPORT_BUCKET_REGION
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-export
                  key: export-bucket-region
                  optional: true
            - name: EXPORT_AWS_ACCESS_KEY_ID
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-export
                  key: export-aws-access-key-id
                  optional: true
            - name: EXPORT_AWS_SECRET_ACCESS_KEY
              valueFrom:
                secretKeyRef:
                  name: mev-inspect-export
                  key: export-aws-secret-access-key
                  optional: true
            {{- range .Values.extraEnv }}
            - name: {{ .name }}
              value: {{ .value }}
            {{- end }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
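The Deployment above expects its credentials and RPC endpoint to already exist in the cluster. A minimal sketch of seeding the two required objects by hand (names match the secretKeyRef/configMapKeyRef entries above; all values are placeholders):

kubectl create secret generic mev-inspect-db-credentials \
  --from-literal=host=postgres \
  --from-literal=username=postgres \
  --from-literal=password=password

kubectl create configmap mev-inspect-rpc \
  --from-literal=url=http://127.0.0.1:8545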
@ -1,46 +0,0 @@
# Default values for mev-inspect.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.

replicaCount: 1

image:
  repository: mev-inspect-py:latest
  pullPolicy: IfNotPresent

imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""

podAnnotations: {}

podSecurityContext: {}
  # fsGroup: 2000

securityContext:
  allowPrivilegeEscalation: false
  capabilities:
    drop:
      - all
  #readOnlyRootFilesystem: true
  runAsNonRoot: true
  runAsUser: 1000


resources: {}
  # We usually recommend not to specify default resources and to leave this as a conscious
  # choice for the user. This also increases chances charts run on environments with little
  # resources, such as Minikube. If you do want to specify resources, uncomment the following
  # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
  # limits:
  #   cpu: 100m
  #   memory: 128Mi
  # requests:
  #   cpu: 100m
  #   memory: 128Mi

nodeSelector: {}

tolerations: []

affinity: {}
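Any of these defaults can be overridden at install time instead of editing the file; a sketch, again assuming the chart directory is ./k8s/mev-inspect:

helm upgrade --install mev-inspect ./k8s/mev-inspect \
  --set replicaCount=2 \
  --set image.pullPolicy=Always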
54
listener
@ -1,54 +0,0 @@
#!/bin/sh

set -e

NAME=listener
PIDFILE=/home/flashbot/$NAME.pid
DAEMON=/bin/bash
DAEMON_OPTS='-c "poetry run python listener.py"'

case "$1" in
  start)
    echo -n "Starting daemon: "$NAME
    start-stop-daemon \
      --background \
      --chdir /app \
      --chuid flashbot \
      --start \
      --quiet \
      --pidfile $PIDFILE \
      --make-pidfile \
      --startas /bin/bash -- -c "poetry run python listener.py"
    echo "."
    ;;
  stop)
    echo -n "Stopping daemon: "$NAME
    start-stop-daemon --stop --quiet --oknodo --pidfile $PIDFILE
    rm $PIDFILE
    echo "."
    ;;
  tail)
    tail -f listener.log
    ;;
  restart)
    echo -n "Restarting daemon: "$NAME
    start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile $PIDFILE
    rm $PIDFILE
    start-stop-daemon \
      --background \
      --chdir /app \
      --chuid flashbot \
      --start \
      --quiet \
      --pidfile $PIDFILE \
      --make-pidfile \
      --startas /bin/bash -- -c "poetry run python listener.py"
    echo "."
    ;;

  *)
    echo "Usage: $0 {start|stop|restart|tail}"
    exit 1
esac

exit 0
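Driving the wrapper looks like any init-style script; for example, from inside the container:

./listener start     # daemonize listener.py and write the pidfile
./listener tail      # follow listener.log
./listener restart   # stop with a 30s retry window, then start again
./listener stop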
126
listener.py
@ -1,126 +0,0 @@
import asyncio
import logging
import os

import dramatiq
from aiohttp_retry import ExponentialRetry, RetryClient

from mev_inspect.block import get_latest_block_number
from mev_inspect.concurrency import coro
from mev_inspect.crud.latest_block_update import (
    find_latest_block_update,
    update_latest_block,
)
from mev_inspect.db import get_inspect_session, get_trace_session
from mev_inspect.inspector import MEVInspector
from mev_inspect.provider import get_base_provider
from mev_inspect.queue.broker import connect_broker
from mev_inspect.queue.tasks import (
    HIGH_PRIORITY,
    HIGH_PRIORITY_QUEUE,
    realtime_export_task,
)
from mev_inspect.signal_handler import GracefulKiller

logging.basicConfig(filename="listener.log", filemode="a", level=logging.INFO)
logger = logging.getLogger(__name__)

# lag to make sure the blocks we see are settled
BLOCK_NUMBER_LAG = 5


@coro
async def run():
    rpc = os.getenv("RPC_URL")
    if rpc is None:
        raise RuntimeError("Missing environment variable RPC_URL")

    healthcheck_url = os.getenv("LISTENER_HEALTHCHECK_URL")

    logger.info("Starting...")

    killer = GracefulKiller()

    inspect_db_session = get_inspect_session()
    trace_db_session = get_trace_session()

    broker = connect_broker()
    export_actor = dramatiq.actor(
        realtime_export_task,
        broker=broker,
        queue_name=HIGH_PRIORITY_QUEUE,
        priority=HIGH_PRIORITY,
    )

    inspector = MEVInspector(rpc)
    base_provider = get_base_provider(rpc)

    while not killer.kill_now:
        await inspect_next_block(
            inspector,
            inspect_db_session,
            trace_db_session,
            base_provider,
            healthcheck_url,
            export_actor,
        )

    logger.info("Stopping...")


async def inspect_next_block(
    inspector: MEVInspector,
    inspect_db_session,
    trace_db_session,
    base_provider,
    healthcheck_url,
    export_actor,
):
    latest_block_number = await get_latest_block_number(base_provider)
    last_written_block = find_latest_block_update(inspect_db_session)

    logger.info(f"Latest block: {latest_block_number}")
    logger.info(f"Last written block: {last_written_block}")

    if last_written_block is None:
        # maintain lag if no blocks written yet
        last_written_block = latest_block_number - BLOCK_NUMBER_LAG - 1

    if last_written_block < (latest_block_number - BLOCK_NUMBER_LAG):
        block_number = last_written_block + 1

        logger.info(f"Writing block: {block_number}")

        await inspector.inspect_single_block(
            inspect_db_session=inspect_db_session,
            trace_db_session=trace_db_session,
            block=block_number,
        )

        update_latest_block(inspect_db_session, block_number)

        logger.info(f"Sending block {block_number} for export")
        export_actor.send(block_number)

        if healthcheck_url:
            await ping_healthcheck_url(healthcheck_url)
    else:
        await asyncio.sleep(5)


async def ping_healthcheck_url(url):
    retry_options = ExponentialRetry(attempts=3)

    async with RetryClient(
        raise_for_status=False, retry_options=retry_options
    ) as client:
        async with client.get(url) as _response:
            pass


if __name__ == "__main__":
    try:
        run()
    except Exception as e:
        logger.error(e)
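A sketch of running the listener directly, outside the daemon wrapper (the node URL is a placeholder; RPC_URL is required, the healthcheck URL is optional):

# Worked example of the lag logic: with BLOCK_NUMBER_LAG = 5, a chain tip of
# 15,000,000 and an empty database, last_written_block is set to 14,999,994,
# so the first block written is 14,999,995 -- always 5 behind the tip.
export RPC_URL=http://127.0.0.1:8545
poetry run python listener.py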
21
loop.py
@ -1,21 +0,0 @@
import logging
import time

from mev_inspect.signal_handler import GracefulKiller

logging.basicConfig(filename="loop.log", level=logging.INFO)
logger = logging.getLogger(__name__)


def run():
    logger.info("Starting...")

    killer = GracefulKiller()
    while not killer.kill_now:
        time.sleep(1)

    logger.info("Stopping...")


if __name__ == "__main__":
    run()
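loop.py is what the Deployment's args run: it simply keeps the pod alive until a termination signal arrives, so commands can be exec'd into the container. A sketch of exercising that, assuming GracefulKiller traps SIGTERM:

poetry run python loop.py &
LOOP_PID=$!
kill -TERM "$LOOP_PID"   # sets killer.kill_now; the loop exits within ~1s
wait "$LOOP_PID"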
129
mev
@ -1,129 +0,0 @@
#!/usr/bin/env bash

set -e

DB_NAME=mev_inspect

function get_kube_secret(){
    kubectl get secrets $1 -o jsonpath="{.data.$2}" | base64 --decode
}

function get_kube_db_secret(){
    kubectl get secrets mev-inspect-db-credentials -o jsonpath="{.data.$1}" | base64 --decode
}

function db(){
    host=$(get_kube_secret "mev-inspect-db-credentials" "host")
    username=$(get_kube_secret "mev-inspect-db-credentials" "username")
    password=$(get_kube_secret "mev-inspect-db-credentials" "password")

    kubectl run -i --rm --tty postgres-client-$RANDOM \
        --env="PGPASSWORD=$password" \
        --image=jbergknoff/postgresql-client \
        -- $DB_NAME --host=$host --user=$username
}

function redis(){
    echo "To continue, enter 'shift + r'"
    redis_password=$(get_kube_secret "redis" "redis-password")
    kubectl run -i --rm --tty \
        --namespace default redis-client-$RANDOM \
        --env REDIS_PASSWORD=$redis_password \
        --image docker.io/bitnami/redis:6.2.6-debian-10-r0 \
        --command -- redis-cli -h redis-master -a $redis_password
}

case "$1" in
  db)
    echo "Connecting to $DB_NAME"
    db
    ;;
  redis)
    echo "Connecting to redis"
    redis
    ;;
  listener)
    kubectl exec -ti deploy/mev-inspect -- ./listener $2
    ;;
  block-list)
    echo "Backfilling blocks from stdin"
    kubectl exec -i deploy/mev-inspect -- poetry run enqueue-block-list
    ;;
  backfill)
    after_block_number=$2
    before_block_number=$3

    echo "Backfilling from $after_block_number to $before_block_number"
    kubectl exec -ti deploy/mev-inspect -- poetry run enqueue-many-blocks $after_block_number $before_block_number
    ;;
  inspect)
    block_number=$2
    echo "Inspecting block $block_number"
    kubectl exec -ti deploy/mev-inspect -- poetry run inspect-block $block_number
    ;;
  inspect-many)
    after_block_number=$2
    before_block_number=$3
    echo "Inspecting from block $after_block_number to $before_block_number"
    kubectl exec -ti deploy/mev-inspect -- \
        poetry run inspect-many-blocks $after_block_number $before_block_number
    ;;
  test)
    shift
    echo "Running tests"
    kubectl exec -ti deploy/mev-inspect -- poetry run pytest tests $@
    ;;
  fetch)
    block_number=$2
    echo "Fetching block $block_number"
    kubectl exec -ti deploy/mev-inspect -- poetry run fetch-block $block_number
    ;;
  prices)
    shift
    case "$1" in
      fetch-all)
        echo "Running price fetch-all"
        kubectl exec -ti deploy/mev-inspect -- \
            poetry run fetch-all-prices
        ;;
      fetch-range)
        after=$2
        before=$3
        echo "Running price fetch-range"
        kubectl exec -ti deploy/mev-inspect -- \
            poetry run fetch-range $after $before
        ;;
      *)
        echo "prices usage: $0 prices {fetch-all|fetch-range}"
        exit 1
    esac
    ;;
  backfill-export)
    after_block=$2
    before_block=$3

    echo "Sending $after_block to $before_block export to queue"
    kubectl exec -ti deploy/mev-inspect -- poetry run enqueue-many-s3-exports $after_block $before_block
    ;;
  enqueue-s3-export)
    block_number=$2

    echo "Sending $block_number export to queue"
    kubectl exec -ti deploy/mev-inspect -- poetry run enqueue-s3-export $block_number
    ;;
  s3-export)
    block_number=$2

    echo "Exporting $block_number"
    kubectl exec -ti deploy/mev-inspect -- poetry run s3-export $block_number
    ;;
  exec)
    shift
    kubectl exec -ti deploy/mev-inspect -- $@
    ;;
  *)
    echo "Usage: $0 {db|redis|listener|block-list|backfill|inspect|inspect-many|test|fetch|prices|backfill-export|enqueue-s3-export|s3-export|exec}"
    exit 1
esac

exit 0
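A few representative invocations of the helper above, run from a machine whose kubectl context points at the cluster (the block numbers and test filter are illustrative):

./mev db                           # open a psql shell against mev_inspect
./mev inspect 12914944             # inspect a single block
./mev backfill 12900000 12900100   # enqueue a block range for backfill
./mev test -k arbitrage            # run matching tests inside the pod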
@ -1,44 +1,20 @@
import json
from pathlib import Path
from typing import Optional

from pydantic import parse_obj_as

from mev_inspect.schemas.abi import ABI
from mev_inspect.schemas.traces import Protocol

THIS_FILE_DIRECTORY = Path(__file__).parents[0]
ABI_DIRECTORY_PATH = THIS_FILE_DIRECTORY / "abis"
from mev_inspect.config import load_config
from mev_inspect.schemas import ABI


def get_abi_path(abi_name: str, protocol: Optional[Protocol]) -> Optional[Path]:
    abi_filename = f"{abi_name}.json"
    abi_path = (
        ABI_DIRECTORY_PATH / abi_filename
        if protocol is None
        else ABI_DIRECTORY_PATH / protocol.value / abi_filename
    )
    if abi_path.is_file():
        return abi_path

    return None


# raw abi, for instantiating contract for queries (as opposed to classification, see below)
def get_raw_abi(abi_name: str, protocol: Optional[Protocol]) -> Optional[str]:
    abi_path = get_abi_path(abi_name, protocol)
    if abi_path is not None:
        with abi_path.open() as abi_file:
            return abi_file.read()

    return None


def get_abi(abi_name: str, protocol: Optional[Protocol]) -> Optional[ABI]:
    abi_path = get_abi_path(abi_name, protocol)
    if abi_path is not None:
        with abi_path.open() as abi_file:
            abi_json = json.load(abi_file)
            return parse_obj_as(ABI, abi_json)
ABI_CONFIG_KEY = "ABI"

config = load_config()


def get_abi(abi_name: str) -> Optional[ABI]:
    if abi_name in config[ABI_CONFIG_KEY]:
        abi_json = json.loads(config[ABI_CONFIG_KEY][abi_name])
        return parse_obj_as(ABI, abi_json)

    return None
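With the config-based version of get_abi above, fetching a parsed ABI needs only a name; a quick sketch from a shell (the "WETH9" name is illustrative and depends on what load_config exposes under the "ABI" key):

poetry run python -c 'from mev_inspect.abi import get_abi; print(get_abi("WETH9"))'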
@ -1 +0,0 @@
[{"inputs":[{"components":[{"internalType":"contract IERC20TokenV06","name":"makerToken","type":"address"},{"internalType":"contract IERC20TokenV06","name":"takerToken","type":"address"},{"internalType":"uint128","name":"makerAmount","type":"uint128"},{"internalType":"uint128","name":"takerAmount","type":"uint128"},{"internalType":"uint128","name":"takerTokenFeeAmount","type":"uint128"},{"internalType":"address","name":"maker","type":"address"},{"internalType":"address","name":"taker","type":"address"},{"internalType":"address","name":"sender","type":"address"},{"internalType":"address","name":"feeRecipient","type":"address"},{"internalType":"bytes32","name":"pool","type":"bytes32"},{"internalType":"uint64","name":"expiry","type":"uint64"},{"internalType":"uint256","name":"salt","type":"uint256"}],"internalType":"struct LibNativeOrder.LimitOrder[]","name":"orders","type":"tuple[]"},{"components":[{"internalType":"enum LibSignature.SignatureType","name":"signatureType","type":"uint8"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"internalType":"struct LibSignature.Signature[]","name":"signatures","type":"tuple[]"},{"internalType":"uint128[]","name":"takerTokenFillAmounts","type":"uint128[]"},{"internalType":"bool","name":"revertIfIncomplete","type":"bool"}],"name":"batchFillLimitOrders","outputs":[{"internalType":"uint128[]","name":"takerTokenFilledAmounts","type":"uint128[]"},{"internalType":"uint128[]","name":"makerTokenFilledAmounts","type":"uint128[]"}],"stateMutability":"payable","type":"function"},{"inputs":[{"components":[{"internalType":"contract IERC20TokenV06","name":"makerToken","type":"address"},{"internalType":"contract IERC20TokenV06","name":"takerToken","type":"address"},{"internalType":"uint128","name":"makerAmount","type":"uint128"},{"internalType":"uint128","name":"takerAmount","type":"uint128"},{"internalType":"address","name":"maker","type":"address"},{"internalType":"address","name":"taker","type":"address"},{"internalType":"address","name":"txOrigin","type":"address"},{"internalType":"bytes32","name":"pool","type":"bytes32"},{"internalType":"uint64","name":"expiry","type":"uint64"},{"internalType":"uint256","name":"salt","type":"uint256"}],"internalType":"struct LibNativeOrder.RfqOrder[]","name":"orders","type":"tuple[]"},{"components":[{"internalType":"enum LibSignature.SignatureType","name":"signatureType","type":"uint8"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"internalType":"struct LibSignature.Signature[]","name":"signatures","type":"tuple[]"},{"internalType":"uint128[]","name":"takerTokenFillAmounts","type":"uint128[]"},{"internalType":"bool","name":"revertIfIncomplete","type":"bool"}],"name":"batchFillRfqOrders","outputs":[{"internalType":"uint128[]","name":"takerTokenFilledAmounts","type":"uint128[]"},{"internalType":"uint128[]","name":"makerTokenFilledAmounts","type":"uint128[]"}],"stateMutability":"nonpayable","type":"function"}]
@ -1 +0,0 @@
[{"inputs":[{"internalType":"address","name":"target","type":"address"},{"internalType":"bytes","name":"callData","type":"bytes"}],"name":"bootstrap","outputs":[],"stateMutability":"nonpayable","type":"function"}]
@ -1 +0,0 @@
[{"inputs":[],"name":"FEATURE_NAME","outputs":[{"internalType":"string","name":"name","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"FEATURE_VERSION","outputs":[{"internalType":"uint256","name":"version","type":"uint256"}],"stateMutability":"view","type":"function"}]
@ -1 +0,0 @@
[{"inputs":[{"internalType":"contract IERC20TokenV06","name":"erc20","type":"address"},{"internalType":"uint256","name":"amountOut","type":"uint256"},{"internalType":"address payable","name":"recipientWallet","type":"address"}],"name":"transferTrappedTokensTo","outputs":[],"stateMutability":"nonpayable","type":"function"}]
@ -1 +0,0 @@
[{"anonymous":false,"inputs":[{"indexed":false,"internalType":"contract IERC20TokenV06","name":"inputToken","type":"address"},{"indexed":false,"internalType":"contract IERC20TokenV06","name":"outputToken","type":"address"},{"indexed":false,"internalType":"uint256","name":"inputTokenAmount","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"outputTokenAmount","type":"uint256"},{"indexed":false,"internalType":"contract ILiquidityProvider","name":"provider","type":"address"},{"indexed":false,"internalType":"address","name":"recipient","type":"address"}],"name":"LiquidityProviderSwap","type":"event"},{"inputs":[{"internalType":"contract IERC20TokenV06","name":"inputToken","type":"address"},{"internalType":"contract IERC20TokenV06","name":"outputToken","type":"address"},{"internalType":"contract ILiquidityProvider","name":"provider","type":"address"},{"internalType":"address","name":"recipient","type":"address"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"},{"internalType":"bytes","name":"auxiliaryData","type":"bytes"}],"name":"sellToLiquidityProvider","outputs":[{"internalType":"uint256","name":"boughtAmount","type":"uint256"}],"stateMutability":"payable","type":"function"}]
File diff suppressed because one or more lines are too long
@ -1 +0,0 @@
[{"inputs":[{"internalType":"contract IERC20TokenV06","name":"outputToken","type":"address"},{"components":[{"internalType":"enum IMultiplexFeature.MultiplexSubcall","name":"id","type":"uint8"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"bytes","name":"data","type":"bytes"}],"internalType":"struct IMultiplexFeature.BatchSellSubcall[]","name":"calls","type":"tuple[]"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"}],"name":"multiplexBatchSellEthForToken","outputs":[{"internalType":"uint256","name":"boughtAmount","type":"uint256"}],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"contract IERC20TokenV06","name":"inputToken","type":"address"},{"components":[{"internalType":"enum IMultiplexFeature.MultiplexSubcall","name":"id","type":"uint8"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"bytes","name":"data","type":"bytes"}],"internalType":"struct IMultiplexFeature.BatchSellSubcall[]","name":"calls","type":"tuple[]"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"}],"name":"multiplexBatchSellTokenForEth","outputs":[{"internalType":"uint256","name":"boughtAmount","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"contract IERC20TokenV06","name":"inputToken","type":"address"},{"internalType":"contract IERC20TokenV06","name":"outputToken","type":"address"},{"components":[{"internalType":"enum IMultiplexFeature.MultiplexSubcall","name":"id","type":"uint8"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"bytes","name":"data","type":"bytes"}],"internalType":"struct IMultiplexFeature.BatchSellSubcall[]","name":"calls","type":"tuple[]"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"}],"name":"multiplexBatchSellTokenForToken","outputs":[{"internalType":"uint256","name":"boughtAmount","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address[]","name":"tokens","type":"address[]"},{"components":[{"internalType":"enum IMultiplexFeature.MultiplexSubcall","name":"id","type":"uint8"},{"internalType":"bytes","name":"data","type":"bytes"}],"internalType":"struct IMultiplexFeature.MultiHopSellSubcall[]","name":"calls","type":"tuple[]"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"}],"name":"multiplexMultiHopSellEthForToken","outputs":[{"internalType":"uint256","name":"boughtAmount","type":"uint256"}],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"address[]","name":"tokens","type":"address[]"},{"components":[{"internalType":"enum IMultiplexFeature.MultiplexSubcall","name":"id","type":"uint8"},{"internalType":"bytes","name":"data","type":"bytes"}],"internalType":"struct IMultiplexFeature.MultiHopSellSubcall[]","name":"calls","type":"tuple[]"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"}],"name":"multiplexMultiHopSellTokenForEth","outputs":[{"internalType":"uint256","name":"boughtAmount","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address[]","name":"tokens","type":"address[]"},{"components":[{"internalType":"enum 
IMultiplexFeature.MultiplexSubcall","name":"id","type":"uint8"},{"internalType":"bytes","name":"data","type":"bytes"}],"internalType":"struct IMultiplexFeature.MultiHopSellSubcall[]","name":"calls","type":"tuple[]"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"}],"name":"multiplexMultiHopSellTokenForToken","outputs":[{"internalType":"uint256","name":"boughtAmount","type":"uint256"}],"stateMutability":"nonpayable","type":"function"}]
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -1 +0,0 @@
[{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"caller","type":"address"},{"indexed":false,"internalType":"address","name":"migrator","type":"address"},{"indexed":false,"internalType":"address","name":"newOwner","type":"address"}],"name":"Migrated","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"inputs":[{"internalType":"address","name":"target","type":"address"},{"internalType":"bytes","name":"data","type":"bytes"},{"internalType":"address","name":"newOwner","type":"address"}],"name":"migrate","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"ownerAddress","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"}]
@ -1 +0,0 @@
[{"inputs":[{"internalType":"contract IERC20TokenV06[]","name":"tokens","type":"address[]"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"},{"internalType":"enum IPancakeSwapFeature.ProtocolFork","name":"fork","type":"uint8"}],"name":"sellToPancakeSwap","outputs":[{"internalType":"uint256","name":"buyAmount","type":"uint256"}],"stateMutability":"payable","type":"function"}]
@ -1 +0,0 @@
[{"anonymous":false,"inputs":[{"indexed":true,"internalType":"bytes4","name":"selector","type":"bytes4"},{"indexed":false,"internalType":"address","name":"oldImpl","type":"address"},{"indexed":false,"internalType":"address","name":"newImpl","type":"address"}],"name":"ProxyFunctionUpdated","type":"event"},{"inputs":[{"internalType":"bytes4","name":"selector","type":"bytes4"},{"internalType":"address","name":"impl","type":"address"}],"name":"extend","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes4","name":"selector","type":"bytes4"},{"internalType":"uint256","name":"idx","type":"uint256"}],"name":"getRollbackEntryAtIndex","outputs":[{"internalType":"address","name":"impl","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes4","name":"selector","type":"bytes4"}],"name":"getRollbackLength","outputs":[{"internalType":"uint256","name":"rollbackLength","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes4","name":"selector","type":"bytes4"},{"internalType":"address","name":"targetImpl","type":"address"}],"name":"rollback","outputs":[],"stateMutability":"nonpayable","type":"function"}]
@ -1 +0,0 @@
[{"inputs":[],"name":"testFn","outputs":[{"internalType":"uint256","name":"id","type":"uint256"}],"stateMutability":"view","type":"function"}]
@ -1 +0,0 @@
[{"inputs":[{"internalType":"contract IERC20TokenV06","name":"token","type":"address"},{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"_spendERC20Tokens","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getAllowanceTarget","outputs":[{"internalType":"address","name":"target","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"contract IERC20TokenV06","name":"token","type":"address"},{"internalType":"address","name":"owner","type":"address"}],"name":"getSpendableERC20BalanceOf","outputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"stateMutability":"view","type":"function"}]
@ -1 +0,0 @@
[{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"quoteSigner","type":"address"}],"name":"QuoteSignerUpdated","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"taker","type":"address"},{"indexed":false,"internalType":"address","name":"inputToken","type":"address"},{"indexed":false,"internalType":"address","name":"outputToken","type":"address"},{"indexed":false,"internalType":"uint256","name":"inputTokenAmount","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"outputTokenAmount","type":"uint256"}],"name":"TransformedERC20","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"address","name":"transformerDeployer","type":"address"}],"name":"TransformerDeployerUpdated","type":"event"},{"inputs":[{"components":[{"internalType":"address payable","name":"taker","type":"address"},{"internalType":"contract IERC20TokenV06","name":"inputToken","type":"address"},{"internalType":"contract IERC20TokenV06","name":"outputToken","type":"address"},{"internalType":"uint256","name":"inputTokenAmount","type":"uint256"},{"internalType":"uint256","name":"minOutputTokenAmount","type":"uint256"},{"components":[{"internalType":"uint32","name":"deploymentNonce","type":"uint32"},{"internalType":"bytes","name":"data","type":"bytes"}],"internalType":"struct ITransformERC20Feature.Transformation[]","name":"transformations","type":"tuple[]"},{"internalType":"bool","name":"useSelfBalance","type":"bool"},{"internalType":"address payable","name":"recipient","type":"address"}],"internalType":"struct ITransformERC20Feature.TransformERC20Args","name":"args","type":"tuple"}],"name":"_transformERC20","outputs":[{"internalType":"uint256","name":"outputTokenAmount","type":"uint256"}],"stateMutability":"payable","type":"function"},{"inputs":[],"name":"createTransformWallet","outputs":[{"internalType":"contract IFlashWallet","name":"wallet","type":"address"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"getQuoteSigner","outputs":[{"internalType":"address","name":"signer","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getTransformWallet","outputs":[{"internalType":"contract IFlashWallet","name":"wallet","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getTransformerDeployer","outputs":[{"internalType":"address","name":"deployer","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"quoteSigner","type":"address"}],"name":"setQuoteSigner","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"transformerDeployer","type":"address"}],"name":"setTransformerDeployer","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"contract IERC20TokenV06","name":"inputToken","type":"address"},{"internalType":"contract IERC20TokenV06","name":"outputToken","type":"address"},{"internalType":"uint256","name":"inputTokenAmount","type":"uint256"},{"internalType":"uint256","name":"minOutputTokenAmount","type":"uint256"},{"components":[{"internalType":"uint32","name":"deploymentNonce","type":"uint32"},{"internalType":"bytes","name":"data","type":"bytes"}],"internalType":"struct 
ITransformERC20Feature.Transformation[]","name":"transformations","type":"tuple[]"}],"name":"transformERC20","outputs":[{"internalType":"uint256","name":"outputTokenAmount","type":"uint256"}],"stateMutability":"payable","type":"function"}]
|
@ -1 +0,0 @@
[{"inputs":[{"internalType":"contract IERC20TokenV06[]","name":"tokens","type":"address[]"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"},{"internalType":"bool","name":"isSushi","type":"bool"}],"name":"sellToUniswap","outputs":[{"internalType":"uint256","name":"buyAmount","type":"uint256"}],"stateMutability":"payable","type":"function"}]
@ -1 +0,0 @@
[{"inputs":[{"internalType":"bytes","name":"encodedPath","type":"bytes"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"},{"internalType":"address","name":"recipient","type":"address"}],"name":"_sellHeldTokenForTokenToUniswapV3","outputs":[{"internalType":"uint256","name":"buyAmount","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes","name":"encodedPath","type":"bytes"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"},{"internalType":"address","name":"recipient","type":"address"}],"name":"sellEthForTokenToUniswapV3","outputs":[{"internalType":"uint256","name":"buyAmount","type":"uint256"}],"stateMutability":"payable","type":"function"},{"inputs":[{"internalType":"bytes","name":"encodedPath","type":"bytes"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"},{"internalType":"address payable","name":"recipient","type":"address"}],"name":"sellTokenForEthToUniswapV3","outputs":[{"internalType":"uint256","name":"buyAmount","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes","name":"encodedPath","type":"bytes"},{"internalType":"uint256","name":"sellAmount","type":"uint256"},{"internalType":"uint256","name":"minBuyAmount","type":"uint256"},{"internalType":"address","name":"recipient","type":"address"}],"name":"sellTokenForTokenToUniswapV3","outputs":[{"internalType":"uint256","name":"buyAmount","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"int256","name":"amount0Delta","type":"int256"},{"internalType":"int256","name":"amount1Delta","type":"int256"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"uniswapV3SwapCallback","outputs":[],"stateMutability":"nonpayable","type":"function"}]
@ -1 +0,0 @@
[{"inputs":[],"name":"deployer","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address payable","name":"ethRecipient","type":"address"}],"name":"die","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"components":[{"internalType":"address payable","name":"sender","type":"address"},{"internalType":"address payable","name":"taker","type":"address"},{"internalType":"bytes","name":"data","type":"bytes"}],"internalType":"struct IERC20Transformer.TransformContext","name":"context","type":"tuple"}],"name":"transform","outputs":[{"internalType":"bytes4","name":"success","type":"bytes4"}],"stateMutability":"nonpayable","type":"function"}]
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -1 +0,0 @@
[{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"guy","type":"address"},{"name":"wad","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"src","type":"address"},{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"name":"wad","type":"uint256"}],"name":"withdraw","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"dst","type":"address"},{"name":"wad","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"deposit","outputs":[],"payable":true,"stateMutability":"payable","type":"function"},{"constant":true,"inputs":[{"name":"","type":"address"},{"name":"","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"payable":true,"stateMutability":"payable","type":"fallback"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"guy","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"dst","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Deposit","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"src","type":"address"},{"indexed":false,"name":"wad","type":"uint256"}],"name":"Withdrawal","type":"event"}]
Some files were not shown because too many files have changed in this diff