Add typst and tinymist to shell

- Start writing notes
- Notes on Uniswap V2's optimum
- Fix error in formula
- Test `computeAmountInt` using various deltas
- Add `concurrency` to the default configuration file
- Remove unused imports
- Correctly propagate error
- Allow dead code
- Make the priority queue a real FIFO
- Refactor: remove priority queue as stream and use channels
- Increase buffer size
- New `flashArbitrage` function
- Comment with some ideas
- Add pragma version
- Refactor: decrease the amount of calls
- Remove unused code
- Re-enable tests
- Remove comment
- Process known pairs when started
- Avoid re-allocating a new provider every time
- Ignore `nixos.qcow2` file created by the VM
- Add support for `aarch64-linux`
- Add NixOS module and VM configuration
- Add `itertools`
- Add arbitrage opportunity detection
- Implement `fallback` method for non-standard callbacks
- Add more logs
- Fix sign error in optimum formula
- Add deployment scripts and `agenix-shell` secrets
- Bump cargo packages
- Fix typo
- Print out an error if processing a pair goes wrong
- Add `actionlint` to formatters
- Fix typo
- Add TODO comment
- Remove no-longer-relevant comment
- Big refactor: process actions always in the correct order, avoiding corner cases; avoid using semaphores
- New API key
- Add `age` to dev shell (used by Emacs' `agenix-mode` on my system)
- Fix parametric deploy scripts
- Add `run-forge-tests` flake app
- Remove fork URL from Solidity source
- Remove `pairDir` argument
- Add link to `ArbitrageManager`'s ABI
- WIP
This commit is contained in:
parent 7a1e03ee7a
commit fb378c4931
17 changed files with 1222 additions and 441 deletions
.gitignore (vendored): 3 changes

@@ -6,4 +6,5 @@ onchain/out
onchain/cache
.direnv
.pre-commit-config.yaml
**/result
nixos.qcow2
flake.lock (generated): 211 changes

@@ -1,5 +1,28 @@
{
"nodes": {
"agenix-shell": {
"inputs": {
"flake-parts": "flake-parts",
"flake-root": "flake-root",
"git-hooks-nix": "git-hooks-nix",
"nix-github-actions": "nix-github-actions",
"nixpkgs": "nixpkgs",
"treefmt-nix": "treefmt-nix"
},
"locked": {
"lastModified": 1747248416,
"narHash": "sha256-mthvi7EARHz01rqyJEvyZtrXooKEEoLkt7Fhu2W1djM=",
"owner": "aciceri",
"repo": "agenix-shell",
"rev": "df2787101d5feb8f82e50d100ad37fc0b6c53b75",
"type": "github"
},
"original": {
"owner": "aciceri",
"repo": "agenix-shell",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {

@@ -16,10 +39,44 @@
"type": "github"
}
},
"flake-compat_2": {
"flake": false,
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-parts": {
"inputs": {
"nixpkgs-lib": "nixpkgs-lib"
},
"locked": {
"lastModified": 1743550720,
"narHash": "sha256-hIshGgKZCgWh6AYJpJmRgFdR3WUbkY04o82X05xqQiY=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "c621e8422220273271f52058f618c94e405bb0f5",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"flake-parts_2": {
"inputs": {
"nixpkgs-lib": "nixpkgs-lib_2"
},
"locked": {
"lastModified": 1741352980,
"narHash": "sha256-+u2UunDA4Cl5Fci3m7S643HzKmIDAe+fiXrLqYsR2fs=",

@@ -49,6 +106,21 @@
"type": "github"
}
},
"flake-root_2": {
"locked": {
"lastModified": 1723604017,
"narHash": "sha256-rBtQ8gg+Dn4Sx/s+pvjdq3CB2wQNzx9XGFq/JVGCB6k=",
"owner": "srid",
"repo": "flake-root",
"rev": "b759a56851e10cb13f6b8e5698af7b59c44be26e",
"type": "github"
},
"original": {
"owner": "srid",
"repo": "flake-root",
"type": "github"
}
},
"forge-std": {
"flake": false,
"locked": {

@@ -68,8 +140,8 @@
},
"git-hooks": {
"inputs": {
"flake-compat": "flake-compat",
"gitignore": "gitignore",
"flake-compat": "flake-compat_2",
"gitignore": "gitignore_2",
"nixpkgs": [
"nixpkgs"
]

@@ -88,7 +160,52 @@
"type": "github"
}
},
"git-hooks-nix": {
"inputs": {
"flake-compat": "flake-compat",
"gitignore": "gitignore",
"nixpkgs": [
"agenix-shell",
"nixpkgs"
]
},
"locked": {
"lastModified": 1746537231,
"narHash": "sha256-Wb2xeSyOsCoTCTj7LOoD6cdKLEROyFAArnYoS+noCWo=",
"owner": "cachix",
"repo": "git-hooks.nix",
"rev": "fa466640195d38ec97cf0493d6d6882bc4d14969",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "git-hooks.nix",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
"agenix-shell",
"git-hooks-nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1709087332,
"narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"type": "github"
}
},
"gitignore_2": {
"inputs": {
"nixpkgs": [
"git-hooks",

@@ -110,6 +227,27 @@
}
},
"nix-github-actions": {
"inputs": {
"nixpkgs": [
"agenix-shell",
"nixpkgs"
]
},
"locked": {
"lastModified": 1737420293,
"narHash": "sha256-F1G5ifvqTpJq7fdkT34e/Jy9VCyzd5XfJ9TO8fHhJWE=",
"owner": "nix-community",
"repo": "nix-github-actions",
"rev": "f4158fa080ef4503c8f4c820967d946c2af31ec9",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nix-github-actions",
"type": "github"
}
},
"nix-github-actions_2": {
"inputs": {
"nixpkgs": [
"nixpkgs"

@@ -131,11 +269,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1742669843,
"narHash": "sha256-G5n+FOXLXcRx+3hCJ6Rt6ZQyF1zqQ0DL0sWAMn2Nk0w=",
"lastModified": 1746663147,
"narHash": "sha256-Ua0drDHawlzNqJnclTJGf87dBmaO/tn7iZ+TCkTRpRc=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "1e5b653dff12029333a6546c11e108ede13052eb",
"rev": "dda3dcd3fe03e991015e9a74b22d35950f264a54",
"type": "github"
},
"original": {

@@ -146,6 +284,21 @@
}
},
"nixpkgs-lib": {
"locked": {
"lastModified": 1743296961,
"narHash": "sha256-b1EdN3cULCqtorQ4QeWgLMrd5ZGOjLSLemfa00heasc=",
"owner": "nix-community",
"repo": "nixpkgs.lib",
"rev": "e4822aea2a6d1cdd36653c134cacfd64c97ff4fa",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nixpkgs.lib",
"type": "github"
}
},
"nixpkgs-lib_2": {
"locked": {
"lastModified": 1740877520,
"narHash": "sha256-oiwv/ZK/2FhGxrCkQkB83i7GnWXPPLzoqFHpDD3uYpk=",

@@ -160,18 +313,56 @@
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1742669843,
"narHash": "sha256-G5n+FOXLXcRx+3hCJ6Rt6ZQyF1zqQ0DL0sWAMn2Nk0w=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "1e5b653dff12029333a6546c11e108ede13052eb",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-parts": "flake-parts",
"flake-root": "flake-root",
"agenix-shell": "agenix-shell",
"flake-parts": "flake-parts_2",
"flake-root": "flake-root_2",
"forge-std": "forge-std",
"git-hooks": "git-hooks",
"nix-github-actions": "nix-github-actions",
"nixpkgs": "nixpkgs",
"treefmt-nix": "treefmt-nix"
"nix-github-actions": "nix-github-actions_2",
"nixpkgs": "nixpkgs_2",
"treefmt-nix": "treefmt-nix_2"
}
},
"treefmt-nix": {
"inputs": {
"nixpkgs": [
"agenix-shell",
"nixpkgs"
]
},
"locked": {
"lastModified": 1746216483,
"narHash": "sha256-4h3s1L/kKqt3gMDcVfN8/4v2jqHrgLIe4qok4ApH5x4=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "29ec5026372e0dec56f890e50dbe4f45930320fd",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "treefmt-nix",
"type": "github"
}
},
"treefmt-nix_2": {
"inputs": {
"nixpkgs": [
"nixpkgs"
flake.nix: 178 changes

@@ -10,6 +10,7 @@
url = "github:cachix/git-hooks.nix";
inputs.nixpkgs.follows = "nixpkgs";
};
agenix-shell.url = "github:aciceri/agenix-shell";
flake-root.url = "github:srid/flake-root";
nix-github-actions = {
url = "github:nix-community/nix-github-actions";

@@ -22,15 +23,23 @@
};
outputs = inputs:
inputs.flake-parts.lib.mkFlake { inherit inputs; } ({ config, lib, ... }: {
systems = [ "x86_64-linux" ];
inputs.flake-parts.lib.mkFlake { inherit inputs; } (flake@{ config, lib, moduleWithSystem, withSystem, ... }: {
systems = [ "x86_64-linux" "aarch64-linux" ];
imports = [
inputs.git-hooks.flakeModule
inputs.treefmt-nix.flakeModule
inputs.flake-root.flakeModule
imports = with inputs; [
git-hooks.flakeModule
treefmt-nix.flakeModule
flake-root.flakeModule
agenix-shell.flakeModules.agenix-shell
];
agenix-shell = {
secrets = {
ALCHEMY_KEY.file = ./secrets/alchemy_key.age;
WALLET_PRIVATE_KEY.file = ./secrets/wallet_private_key.age;
};
};
perSystem = { pkgs, config, ... }: {
treefmt.config = {
flakeFormatter = true;

@@ -38,6 +47,7 @@
programs = {
nixpkgs-fmt.enable = true;
rustfmt.enable = true;
actionlint.enable = true;
};
};

@@ -52,9 +62,11 @@
};
devShells.default = pkgs.mkShell {
packages = with pkgs; [ cargo rustc rust-analyzer clippy foundry ];
packages = with pkgs; [ cargo rustc rust-analyzer clippy foundry typst tinymist age ragenix ];
inputsFrom = [ config.flake-root.devShell ];
shellHook = ''
source ${lib.getExe config.agenix-shell.installationScript}
# forge will use this directory to download the solc compilers
mkdir -p $HOME/.svm

@@ -95,21 +107,167 @@
OPENSSL_INCLUDE_DIR = "${pkgs.openssl.dev}/include";
PKG_CONFIG_PATH = "${pkgs.openssl.dev}/lib/pkgconfig:$PKG_CONFIG_PATH";
};
meta.mainProgram = "arbi";
};
arbi_sample_config_kdl = pkgs.writeText "arbi-sample-config.kdl" ''
endpoint "wss://eth-mainnet.g.alchemy.com/v2/<REDACTED>"
pairs_file "pairs.json"
concurrency 5
'';
};
run-forge-tests = pkgs.writeShellScriptBin "run-forge-tests" ''
pushd "$FLAKE_ROOT/onchain"
forge test \
--fork-url "wss://mainnet.infura.io/ws/v3/$ALCHEMY_KEY" \
--via-ir \
-vvv
popd
'';
run-vm = pkgs.writeShellScriptBin "run-vm" (lib.getExe flake.config.flake.nixosConfigurations.vm.config.system.build.vm);
} // lib.genAttrs [ "polygon-mainnet" ] (network: pkgs.writeShellScriptBin "deploy-${network}" ''
pushd "$FLAKE_ROOT/onchain"
forge create \
--rpc-url "wss://${network}.infura.io/ws/v3/$ALCHEMY_KEY" \
--private-key "$WALLET_PRIVATE_KEY" \
--via-ir \
--broadcast \
src/ArbitrageManager.sol:ArbitrageManager
popd
'');
checks = {
inherit (config.packages) arbi;
};
};
flake.githubActions = inputs.nix-github-actions.lib.mkGithubMatrix {
checks = lib.getAttrs [ "x86_64-linux" ] config.flake.checks;
flake = {
githubActions = inputs.nix-github-actions.lib.mkGithubMatrix {
checks = lib.getAttrs [ "x86_64-linux" ] config.flake.checks;
};
nixosConfigurations.vm = withSystem "x86_64-linux" (ctx: inputs.nixpkgs.lib.nixosSystem {
system = "x86_64-linux";
modules = [
({ pkgs, modulesPath, ... }: {
imports = [
"${modulesPath}/virtualisation/qemu-vm.nix"
config.flake.nixosModules.arbi
];
services.getty.autologinUser = "root";
services.openssh.settings.PasswordAuthentication = lib.mkForce true;
services.openssh.settings.PermitRootLogin = lib.mkForce "yes";
users.users.root.password = "";
virtualisation = {
graphics = false;
memorySize = 2048;
diskSize = 10000;
forwardPorts = [
{
from = "host";
host.port = 2222;
guest.port = 22;
}
];
};
system.stateVersion = "25.05";
services.arbi = {
enable = true;
log_level = "debug";
configFile = pkgs.writeText "arbi-config.kdl" ''
endpoint "wss://eth-mainnet.g.alchemy.com/v2/kkDMaLVYpWQA0GsCYNFvAODnAxCCiamv"
pairs_file "pairs.json"
concurrency 5
'';
};
})
];
});
nixosModules = {
arbi = moduleWithSystem ({ config }: nixos@{ lib, utils, ... }:
let
cfg = nixos.config.services.arbi;
in
{
options.services.arbi = {
enable = lib.mkEnableOption "arbi";
package = lib.mkOption {
type = lib.types.package;
default = config.packages.arbi;
};
log_level = lib.mkOption {
type = lib.types.enum [ "debug" "trace" "warn" "error" "info" ];
default = "info";
};
configFile = lib.mkOption {
type = lib.types.path;
};
dataDir = lib.mkOption {
type = lib.types.path;
default = "/var/lib/arbi";
};
user = lib.mkOption {
type = lib.types.str;
default = "arbi";
};
group = lib.mkOption {
type = lib.types.str;
default = "arbi";
};
};
config = lib.mkIf cfg.enable {
environment.systemPackages = [ cfg.package ];
users.users.arbi = lib.mkIf (cfg.user == "arbi") {
isSystemUser = true;
group = cfg.group;
};
users.groups.arbi = lib.mkIf (cfg.group == "arbi") { };
systemd.tmpfiles.settings."10-arbi" = {
${cfg.dataDir}.d = {
inherit (cfg) user group;
mode = "0755";
};
};
systemd.services.arbi = {
description = "Arbitrage bot";
after = [ "network.target" ];
wantedBy = [ "multi-user.target" ];
environment.ARBI_LOG_LEVEL = cfg.log_level;
serviceConfig = {
ExecStart = utils.escapeSystemdExecArgs [
(lib.getExe cfg.package)
"--config"
cfg.configFile
"run"
];
KillSignal = "SIGINT";
Restart = "on-failure";
RestartSec = "5s";
User = cfg.user;
Group = cfg.group;
WorkingDirectory = cfg.dataDir;
UMask = "0022";
};
};
};
});
default = config.flake.nixosModules.arbi;
};
};
});
}
notes/notes.pdf: new binary file (not shown)
notes/notes.typ (new file): 86 lines

@@ -0,0 +1,86 @@
= Notes

Miscellaneous notes about *arbi*.

== Uniswap V2's optimal input amount

We consider two Uniswap V2-like pairs $A$ and $B$, both relative to the same two tokens.
Let $X_A$ and $Y_A$ be the reserves of the two tokens on pair $A$, and $X_B$ and $Y_B$ the reserves on pair $B$, and assume that we want to perform two chained swaps this way:

$
... ->^y^* A ->^(x_"out") B ->^(y_"out") ...
$

with $y^*$ the optimal amount to swap in order to maximize the gain function $G(y) = y_"out" - y$.

Let $0 <= f <= 1$ be the fee ($0.003$ by default on Uniswap V2). We know#footnote[https://www.youtube.com/watch?v=9EKksG-fF1k] that the optimum is one of the roots of the following second-degree equation:

$
k^2y^2 + 2k Y_A X_B y + (Y_A X_B)^2 - (1-f)^2 X_A Y_B Y_A X_B = 0
$

where

$
k = (1-f)X_B + (1-f)^2 X_A
$

In the Uniswap V2 implementation we have $1-f = phi/1000$ (with $phi = 997$).
Then we can rewrite:

$
k^2y^2 + 2k Y_A X_B y + (Y_A X_B)^2 - (phi/1000)^2 X_A Y_B Y_A X_B = 0
$

and

$
k = phi/1000 X_B + phi^2/1000^2 X_A
$

Let $a$, $b$ and $c$ be the three coefficients of the quadratic equation.

$
a = k^2
$

$
b = 2k Y_A X_B
$

$
c = (Y_A X_B)^2 - (phi/1000)^2 X_A Y_B Y_A X_B
$

Since $b$ carries an even factor, we can find the roots with the reduced formula

$
y_i = (-b/2 plus.minus sqrt((b^2-4a c)/4))/a
$

Replacing our values:

$
y_i = (-k Y_A X_B plus.minus sqrt(k^2 (Y_A X_B)^2 - k^2 ((Y_A X_B)^2 - phi^2/1000^2 X_A Y_B X_B Y_A)))/k^2
$
$
= -(Y_A X_B)/k plus.minus 1/k^2 sqrt(k^2 phi^2/1000^2 X_A Y_B X_B Y_A)
$
$
= -(Y_A X_B)/k plus.minus 1/k sqrt((phi^2 X_A Y_B X_B Y_A)/1000^2)
$

Since the square root is non-negative, this can be positive only when taking $+$. In conclusion we get the following formula for the optimal amount of token $Y$:

$
y^* = 1/k (sqrt((phi^2 X_A Y_B X_B Y_A) / 1000^2) - Y_A X_B)
$

=== Solidity implementation details

- Integer square roots can be computed effectively and cheaply using the Babylonian method#footnote[https://ethereum.stackexchange.com/a/97540/66173]
- The argument of the square root can overflow; in that case it can be convenient to split it into something like

$
sqrt(phi times X_A div 1000 times Y_B) sqrt(phi times X_B div 1000 times Y_A)
$
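As a quick cross-check of the closed-form optimum and of the Babylonian square root mentioned in the notes above, here is a minimal standalone Rust sketch. It is hypothetical code (plain u128 instead of the crate's U112/U256 types, illustrative reserve values) and uses the overflow-avoiding split of the square root described in the last bullet:

// floor(sqrt(n)) via the Babylonian method
fn isqrt(n: u128) -> u128 {
    if n == 0 {
        return 0;
    }
    let mut x = n;
    let mut y = (x + 1) / 2;
    while y < x {
        x = y;
        y = (x + n / x) / 2;
    }
    x
}

// y* = (sqrt(phi^2 X_A Y_B X_B Y_A / 1000^2) - Y_A X_B) / k, or None when not profitable.
fn optimal_in(x_a: u128, y_a: u128, x_b: u128, y_b: u128) -> Option<u128> {
    let phi = 997u128;
    let k = phi * x_b / 1000 + phi * phi / 1000 * x_a / 1000;
    // Split form of the square root: slightly lossy, but keeps each factor small
    // so the argument does not overflow the integer type.
    let sqrt_term = isqrt(phi * x_a / 1000 * y_b) * isqrt(phi * x_b / 1000 * y_a);
    let psi = y_a * x_b;
    if sqrt_term > psi {
        Some((sqrt_term - psi) / k) // optimal amount of token Y to send into pair A
    } else {
        None // the two pairs agree on the price in this direction
    }
}

fn main() {
    // Reserves chosen so the two pairs disagree on the price, hence an opportunity exists.
    println!("{:?}", optimal_in(1_000_000, 800, 1_000_000, 1_000));
}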
offchain/Cargo.lock (generated): 569 changes — diff suppressed because it is too large.
@@ -9,6 +9,7 @@ clap = { version = "4.5.32", features = ["derive", "env"] }
env_logger = "0.11.7"
eyre = "0.6.12"
futures-util = "0.3.31"
itertools = "0.14.0"
kdl = "6.3.4"
log = "0.4.27"
miette = { version = "7.5.0", features = ["fancy"] }
offchain/abi/ArbitrageManager.json (new symbolic link): 1 line

@@ -0,0 +1 @@
../../onchain/out/ArbitrageManager.sol/ArbitrageManager.json
@@ -1,7 +1,9 @@
use std::{collections::HashMap, path::PathBuf, str::FromStr};
use alloy::primitives::U256;
use alloy::primitives::{aliases::U112, Address};
use itertools::Itertools;
use miette::{miette, Result};
use serde::de::{self, Visitor};
use serde::{Deserialize, Deserializer, Serialize};

@@ -10,11 +12,11 @@ use log::{debug, info};
#[derive(Debug, Serialize, Deserialize)]
pub struct Pair {
token0: Address,
token1: Address,
reserve0: U112,
reserve1: U112,
factory: Address,
pub token0: Address,
pub token1: Address,
pub reserve0: U112,
pub reserve1: U112,
pub factory: Address,
}
#[derive(Debug, Eq, Hash, PartialEq)]

@@ -68,6 +70,14 @@ impl<'de> Deserialize<'de> for AddressPair {
}
}
#[derive(Debug)]
pub struct ArbitrageOpportunity {
pair_a: Address,
pair_b: Address,
direction: bool, // true means token0 -> token1 -> token0
optimum: U256,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Pairs {
pairs: HashMap<Address, Pair>,

@@ -97,10 +107,17 @@ impl Pairs {
}
}
fn get(&self, address: Address) -> Option<&Pair> {
#[allow(dead_code)]
pub fn get(&self, address: Address) -> Option<&Pair> {
self.pairs.get(&address)
}
pub fn get_tokens(&self, address: Address) -> Option<(Address, Address)> {
self.pairs
.get(&address)
.map(|pair| (pair.token0, pair.token1))
}
pub fn add(
&mut self,
pair: Address,

@@ -133,11 +150,11 @@ impl Pairs {
info!("First time seeing pair {}, adding it", { pair });
match self.by_tokens.get_mut(&AddressPair(token0, token1)) {
Some(tokens) => {
tokens.push(pair);
Some(pairs) => {
pairs.push(pair);
info!(
"Already know {} pairs with tokens {:?} and {:?}",
tokens.len(),
pairs.len(),
token0,
token1
);

@@ -161,6 +178,107 @@ impl Pairs {
let data = serde_json::to_string(&self).map_err(|e| miette!(e))?;
std::fs::write(filename, data).map_err(|e| miette!(e))?;
info!("{} Pairs saved to {:?}", self.pairs.len(), filename);
Ok(())
}
pub fn len(&self) -> usize {
self.pairs.len()
}
pub fn iter(&self) -> std::collections::hash_map::Iter<'_, Address, Pair> {
self.pairs.iter()
}
pub fn get_addresses(&self) -> Vec<Address> {
self.pairs.keys().cloned().collect()
}
pub fn get_reserves(&self, address: Address) -> Option<(U112, U112)> {
self.pairs
.get(&address)
.map(|pair| (pair.reserve0, pair.reserve1))
}
pub fn update_reserves(
&mut self,
address: Address,
reserve0: U112,
reserve1: U112,
) -> Result<()> {
if let Some(pair) = self.pairs.get_mut(&address) {
pair.reserve0 = reserve0;
pair.reserve1 = reserve1;
info!(
"Updated reserves for pair {}: reserve0: {}, reserve1: {}",
address, reserve0, reserve1
);
Ok(())
} else {
debug!("Pair {} not found", address);
Ok(()) // TODO return Err
}
}
// TODO at the moment we return all the opportunities, instead we should return only the two opportunities
// (token0 -> token1 -> token0 and token1 -> token0 -> token1) with the highest amountIn
// Remember: choosing an opportunity invalidates the other ones
pub fn look_for_opportunity(
&self,
token0: Address,
token1: Address,
) -> Vec<ArbitrageOpportunity> {
let mut opportunities: Vec<ArbitrageOpportunity> = Vec::new();
if let Some(pairs) = self.by_tokens.get(&AddressPair(token0, token1)) {
pairs.iter()
.permutations(2)
.any(|pairs| {
let pair_a = self.get(*pairs[0]).unwrap();
let pair_b = self.get(*pairs[1]).unwrap();
if let Some(optimum) = optimal_in(pair_a.reserve0, pair_a.reserve1, pair_b.reserve0, pair_b.reserve1) {
info!("Found arbitrage opportunity between pairs {} and {} swapping {} along token0 -> token1 -> token0", pairs[0], pairs[1], optimum);
opportunities.push(ArbitrageOpportunity{
pair_a: *pairs[0],
pair_b: *pairs[1],
direction: true,
optimum
});
}
if let Some(optimum) = optimal_in(pair_a.reserve1, pair_a.reserve0, pair_b.reserve1, pair_b.reserve0) {
info!("Found arbitrage opportunity between pairs {} and {} swapping {} along token1 -> token0 -> token1", pairs[0], pairs[1], optimum);
opportunities.push(ArbitrageOpportunity{
pair_a: *pairs[0],
pair_b: *pairs[1],
direction: false,
optimum
});
}
false
});
}
opportunities
}
}
fn optimal_in(x_a: U112, y_a: U112, x_b: U112, y_b: U112) -> Option<U256> {
let x_a = U256::from(x_a);
let x_b = U256::from(x_b);
let y_a = U256::from(y_a);
let y_b = U256::from(y_b);
let f = U256::from(997);
let ff = f.pow(U256::from(2));
let _1000 = U256::from(1000);
let _1000000 = U256::from(1000000);
let k = f * x_b / _1000 + ff / _1000 * x_a / _1000;
let phi = (ff * x_a * y_b * x_b * y_a / _1000000).root(2);
let psi = y_a * x_b;
if psi >= phi {
None
} else {
Some((phi - psi) / k)
}
}
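The TODO in `look_for_opportunity` above says only the best opportunity per direction should be returned, since executing one invalidates the reserves the others were computed from. A hypothetical Rust sketch of that selection, not the project's code, with a stand-in `Opp` struct and `u128` replacing `ArbitrageOpportunity` and `U256`:

// Stand-in for ArbitrageOpportunity, reduced to the fields the selection needs.
#[derive(Debug)]
struct Opp {
    direction: bool, // true: token0 -> token1 -> token0
    optimum: u128,
}

// Keep only the largest-optimum opportunity for each direction.
fn best_per_direction(opps: Vec<Opp>) -> Vec<Opp> {
    let mut best: [Option<Opp>; 2] = [None, None];
    for opp in opps {
        let slot = &mut best[opp.direction as usize];
        if slot.as_ref().map_or(true, |b| opp.optimum > b.optimum) {
            *slot = Some(opp);
        }
    }
    best.into_iter().flatten().collect()
}

fn main() {
    let opps = vec![
        Opp { direction: true, optimum: 10 },
        Opp { direction: true, optimum: 40 },
        Opp { direction: false, optimum: 5 },
    ];
    println!("{:?}", best_per_direction(opps)); // keeps optimum 40 and optimum 5
}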
@@ -1,44 +0,0 @@
mod pairs;
use alloy::primitives::Address;
use futures_util::Stream;
use log::debug;
use std::{
pin::Pin,
task::{Context, Poll},
};
#[derive(Debug)]
pub enum Action {
ProcessPair(Address),
}
pub struct PriorityQueue(pub Vec<Action>);
impl PriorityQueue {
pub fn new() -> Self {
PriorityQueue(Vec::new())
}
pub fn push(&mut self, action: Action) {
debug!("Adding action {:?} to the priority queue", action);
self.0.push(action);
}
}
impl Stream for PriorityQueue {
type Item = Action;
fn poll_next(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
match self.0.pop() {
None => Poll::Ready(None),
Some(action) => {
debug!("Consuming action {:?} to the priority queue", action);
match action {
Action::ProcessPair(pair) => Poll::Ready(Some(Action::ProcessPair(pair))),
}
}
}
}
}
@@ -1,30 +1,28 @@
#[path = "pairs.rs"]
mod pairs;
#[path = "priority_queue.rs"]
mod priority_queue;
use std::{
sync::{Arc, Mutex},
time::Duration,
};
#![allow(clippy::too_many_arguments)]
use crate::config::Config;
use alloy::{
eips::BlockNumberOrTag,
primitives::Address,
primitives::{aliases::U112, Address, U256},
providers::{
fillers::FillProvider, DynProvider, Provider, ProviderBuilder, RootProvider, WsConnect,
fillers::{BlobGasFiller, ChainIdFiller, FillProvider, GasFiller, JoinFill, NonceFiller},
Provider, ProviderBuilder, RootProvider, WsConnect,
},
pubsub::PubSubFrontend,
rpc::types::Filter,
rpc::{client::RpcClient, types::Filter},
transports::layers::RetryBackoffLayer,
};
use futures_util::{stream, StreamExt};
use miette::{miette, Result};
use futures_util::StreamExt;
use log::{debug, info};
use miette::miette;
use std::{
collections::HashSet,
sync::{Arc, Mutex},
time::{Duration, Instant},
};
#[path = "pairs.rs"]
mod pairs;
use pairs::Pairs;
use priority_queue::{Action, PriorityQueue};
use tokio::time::sleep;
use tokio::sync::mpsc;
alloy::sol!(
#[allow(missing_docs)]

@@ -33,14 +31,27 @@ alloy::sol!(
"abi/IUniswapV2Pair.json"
);
async fn process_swaps(
ws: WsConnect,
priority_queue: Arc<Mutex<PriorityQueue>>,
) -> eyre::Result<()> {
let provider = ProviderBuilder::new().on_ws(ws).await.unwrap();
#[derive(Debug)]
pub enum Action {
ProcessNewPair(Address),
ProcessOldPair(Address, U256, U256, U256, U256),
}
type AlloyProvider = FillProvider<
JoinFill<
alloy::providers::Identity,
JoinFill<GasFiller, JoinFill<BlobGasFiller, JoinFill<NonceFiller, ChainIdFiller>>>,
>,
RootProvider,
>;
async fn subscribe(
provider: Arc<AlloyProvider>,
pairs: Arc<Mutex<Pairs>>,
tx: mpsc::Sender<Action>,
) -> eyre::Result<()> {
let filter = Filter::new()
.event("Swap(address,uint256,uint256,uint256,uint256,address)")
.event("Swap(address,uint256,uint256,uint256,uint256,address)") // TODO manage also sync and skim
.from_block(BlockNumberOrTag::Latest);
let sub = provider.subscribe_logs(&filter).await?;

@@ -55,61 +66,130 @@ async fn process_swaps(
info!("Processing block number {:?}", block_number);
}
priority_queue
.lock()
.unwrap()
.push(Action::ProcessPair(log.address()));
let IUniswapV2Pair::Swap {
sender: _,
amount0In,
amount1In,
amount0Out,
amount1Out,
to: _,
} = log.log_decode()?.inner.data;
let pair_address = log.address();
let pair_already_known = pairs.lock().unwrap().get(pair_address).is_some();
debug!("Event by pair {:?}", pair_address);
if pair_already_known {
tx.send(Action::ProcessOldPair(
pair_address,
amount0In,
amount1In,
amount0Out,
amount1Out,
))
.await?;
} else {
tx.send(Action::ProcessNewPair(pair_address)).await?;
}
}
Ok(())
}
async fn process_pair(
ws: WsConnect,
async fn process_new_pair(
pairs: Arc<Mutex<Pairs>>,
pair_address: Address,
provider: Arc<AlloyProvider>,
) -> eyre::Result<()> {
let provider = ProviderBuilder::new().on_ws(ws).await.unwrap();
let result: eyre::Result<()> = async {
let pair = IUniswapV2Pair::new(pair_address, provider.clone()); // todo can avoid the clone?
let token0 = pair.token0().call().await?._0;
let token1 = pair.token1().call().await?._0;
let reserve0 = pair.getReserves().call().await?.reserve0;
let reserve1 = pair.getReserves().call().await?.reserve1;
let factory = pair.factory().call().await?._0;
let pair = IUniswapV2Pair::new(pair_address, provider);
let token0 = pair.token0().call().await?._0;
let token1 = pair.token1().call().await?._0;
let reserve0 = pair.getReserves().call().await?.reserve0;
let reserve1 = pair.getReserves().call().await?.reserve1;
let factory = pair.factory().call().await?._0;
pairs
.lock()
.unwrap()
.add(pair_address, token0, token1, reserve0, reserve1, factory);
Ok(())
}
.await;
if let Err(e) = &result {
eprintln!("error adding the new pair {}: {}", pair_address, e);
}
result
}
fn process_old_pair(
pairs: Arc<Mutex<Pairs>>,
pair_address: Address,
amount0_in: U256,
amount1_in: U256,
amount0_out: U256,
amount1_out: U256,
) -> eyre::Result<()> {
let (reserve0, reserve1) = pairs.lock().unwrap().get_reserves(pair_address).unwrap();
pairs
.lock()
.unwrap()
.add(pair_address, token0, token1, reserve0, reserve1, factory);
.update_reserves(
pair_address,
reserve0 - U112::from(amount0_out) + U112::from(amount0_in),
reserve1 - U112::from(amount1_in) + U112::from(amount1_out),
)
.unwrap(); // TODO manage error
Ok(()) // TODO manage errors
}
async fn process_known_pairs(
pairs: Arc<Mutex<Pairs>>,
provider: Arc<AlloyProvider>,
) -> eyre::Result<()> {
let addresses = pairs.lock().unwrap().get_addresses();
let len = addresses.len();
info!("Recovering state of {:?} saved pairs", len);
for (i, address) in addresses.into_iter().enumerate() {
info!("Processing pair {}/{}: {:?}", i + 1, len, address);
let result: eyre::Result<()> = async {
let pair = IUniswapV2Pair::new(address, provider.clone());
let reserves = pair.getReserves().call().await?;
let reserve0 = reserves.reserve0;
let reserve1 = reserves.reserve1;
let _ = pairs
.lock()
.unwrap()
.update_reserves(address, reserve0, reserve1); // TODO manage error, should be ok however
Ok(())
}
.await;
if let Err(e) = &result {
eprintln!("Error processing pair {}: {}", address, e);
return result;
}
}
Ok(())
}
async fn consume_priority_queue(
ws: WsConnect,
pairs: Arc<Mutex<Pairs>>,
priority_queue: Arc<Mutex<PriorityQueue>>,
config: Config,
) {
let mut guard = priority_queue.lock().unwrap();
let actions: Vec<Action> = guard.0.drain(..).collect(); //move all actions to temporary vector in order to unlock the mutex
drop(guard); //release before the expensive operation
fn look_for_opportunity(pairs: Arc<Mutex<Pairs>>, involved_pairs: &HashSet<Address>) {
let pairs = pairs.lock().unwrap();
stream::iter(actions)
.for_each_concurrent(config.concurrency, |action| {
let pairs_clone = pairs.clone();
let ws = ws.clone();
async move {
match action {
Action::ProcessPair(pair_address) => {
info!("Processing pair: {:?}", pair_address);
process_pair(ws, pairs_clone, pair_address).await;
}
}
}
})
.await;
for pair_address in involved_pairs {
let (token0, token1) = pairs.get_tokens(*pair_address).unwrap();
let _opportunities = pairs.look_for_opportunity(token0, token1);
}
}
async fn manage_interruption(pairs: Arc<Mutex<Pairs>>, config: Config) -> eyre::Result<()> {

@@ -126,25 +206,93 @@ async fn manage_interruption(pairs: Arc<Mutex<Pairs>>, config: Config) -> eyre::
std::process::exit(0);
}
pub fn run(config: Config) -> Result<()> {
let runtime = tokio::runtime::Runtime::new().unwrap();
pub fn run(config: Config) -> miette::Result<()> {
let runtime = tokio::runtime::Runtime::new().map_err(|e| miette!(e))?;
let pairs = Arc::new(Mutex::new(Pairs::new(&config.pairs_file)?));
let priority_queue = Arc::new(Mutex::new(PriorityQueue::new()));
let ws = WsConnect::new(&config.endpoint);
let (tx, mut rx) = mpsc::channel::<Action>(5000);
runtime.block_on(async {
tokio::spawn(manage_interruption(pairs.clone(), config.clone()));
// process all the `Swap` events adding actions to the queue
tokio::spawn(process_swaps(ws.clone(), priority_queue.clone()));
let retry_layer = RetryBackoffLayer::new(50, 500, 100);
let client = RpcClient::builder()
.layer(retry_layer)
.ws(ws)
.await
.map_err(|e| miette!(e))?;
let provider = Arc::new(ProviderBuilder::new().on_client(client));
let signer: PrivateKeySigner = "".parse().unwrap();
info!("Subscribing to the events...");
tokio::spawn(subscribe(provider.clone(), pairs.clone(), tx.clone()));
info!("Processing known pairs...");
process_known_pairs(pairs.clone(), provider.clone())
.await
.map_err(|e| miette!(e))?;
info!("Finished processing known pairs...");
let mut queue_last_time_not_empty = Instant::now();
let mut block_processed = false;
let mut involved_pairs: HashSet<Address> = HashSet::new();
loop {
consume_priority_queue(ws.clone(), pairs.clone(), priority_queue.clone(), config.clone()).await;
let action = rx.try_recv();
debug!("The entire queue has been processed, waiting 100ms before checking if new actions are available...");
sleep(Duration::from_millis(100)).await;
if let Ok(action) = action {
queue_last_time_not_empty = Instant::now();
block_processed = false;
let len = rx.len();
debug!(
"Processing action {:?}, {:?} actions left",
action, len
);
match action {
Action::ProcessNewPair(pair_address) => {
process_new_pair(pairs.clone(), pair_address, provider.clone())
.await
.map_err(|e| miette!(e))?;
involved_pairs.insert(pair_address);
}
Action::ProcessOldPair(
pair_address,
amount0_in,
amount1_in,
amount0_out,
amount1_out,
) => {
process_old_pair(
pairs.clone(),
pair_address,
amount0_in,
amount1_in,
amount0_out,
amount1_out,
)
.map_err(|e| miette!(e))?;
involved_pairs.insert(pair_address);
}
}
} else {
if !block_processed && Instant::now().duration_since(queue_last_time_not_empty) > Duration::from_millis(50) {
info!("The actions queue has been empty for 100ms, we assume the entire block has been processed");
info!("Involved pairs: {:?}", involved_pairs);
look_for_opportunity(pairs.clone(), &involved_pairs);
block_processed = true;
involved_pairs.clear();
};
std::thread::sleep(Duration::from_millis(10));
}
}
});
Ok(())
#[allow(unreachable_code)]
Ok(())
})
}
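The refactor above replaces the shared priority queue with a tokio mpsc channel: actions are drained with `try_recv`, and once the channel has stayed empty for a short idle window the block is considered fully processed and opportunities are evaluated. A minimal, self-contained sketch of that pattern follows; it is hypothetical code (toy `Action` variants, a loop that exits so the example terminates), not the project's exact types, and assumes a tokio dependency with the "rt", "macros", "sync" and "time" features:

use std::time::{Duration, Instant};
use tokio::sync::mpsc;

#[derive(Debug)]
enum Action {
    ProcessNewPair(u64),
    ProcessOldPair(u64),
}

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::channel::<Action>(5000);

    // Producer: in the real service this is the log-subscription task.
    tokio::spawn(async move {
        tx.send(Action::ProcessNewPair(1)).await.unwrap();
        tx.send(Action::ProcessOldPair(1)).await.unwrap();
        tx.send(Action::ProcessOldPair(2)).await.unwrap();
    });

    let mut last_non_empty = Instant::now();
    let mut block_processed = false;

    loop {
        match rx.try_recv() {
            Ok(action) => {
                // The channel preserves FIFO order, so actions are handled in arrival order.
                last_non_empty = Instant::now();
                block_processed = false;
                println!("processing {:?}", action);
            }
            Err(_) => {
                if !block_processed && last_non_empty.elapsed() > Duration::from_millis(50) {
                    println!("queue idle: assume the block has been fully processed");
                    block_processed = true;
                    break; // the real loop keeps running; we stop to keep the sketch finite
                }
                tokio::time::sleep(Duration::from_millis(10)).await;
            }
        }
    }
}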
@@ -3,8 +3,10 @@ pragma solidity ^0.8.28;
import {IUniswapV2Pair} from "./IUniswapV2Pair.sol";
import {IERC20} from "./IERC20.sol";
import {IUniswapV2Callee} from "./IUniswapV2Callee.sol";
contract ArbitrageManager {
contract ArbitrageManager is IUniswapV2Callee {
uint256 constant f = 997;
function sqrt(uint256 x)

@@ -54,36 +56,59 @@ contract ArbitrageManager {
returns(uint256)
{
uint256 k = f * X_B / 1000 + f ** 2 / 1000 * X_A / 1000;
uint256 phi = sqrt(f * X_A) * sqrt(Y_B * X_B / 1000 * Y_A);
uint256 phi = sqrt(f ** 2 * X_A * Y_B * X_B * Y_A / 1000**2);
uint256 psi = Y_A * X_B;
if (psi >= phi) return 0;
else return (phi - psi) / k;
}
function swap(address _pairA, address _pairB, uint256 amountIn, bool direction)
external
returns (uint256 amountOut)
function flashArbitrage(address firstPair, address secondPair, bool tokenDir)
public
returns (uint256 gain)
{
IUniswapV2Pair pairA = IUniswapV2Pair(_pairA);
IUniswapV2Pair pairB = IUniswapV2Pair(_pairB);
IUniswapV2Pair pairA = IUniswapV2Pair(firstPair);
IUniswapV2Pair pairB = IUniswapV2Pair(secondPair);
IERC20 tokenA = direction ? IERC20(pairA.token0()) : IERC20(pairA.token1());
IERC20 firstToken = tokenDir ? IERC20(pairA.token0()) : IERC20(pairA.token1());
IERC20 secondToken = tokenDir ? IERC20(pairA.token1()) : IERC20(pairA.token0());
(uint256 X_A, uint256 Y_A,) = pairA.getReserves();
(uint256 X_B, uint256 Y_B,) = pairB.getReserves();
// Transfer the input tokens from the sender to pairA
tokenA.transferFrom(msg.sender, address(pairA), amountIn);
uint256 amountIn = optimalIn(tokenDir ? X_B : X_A, tokenDir ? Y_B : Y_A, tokenDir ? X_A : X_B, tokenDir ? Y_A : Y_B);
uint256 firstAmountOut = getAmountOut(amountIn, tokenDir ? X_A : Y_A, tokenDir ? Y_A : X_A);
uint256 secondAmountOut = getAmountOut(firstAmountOut, tokenDir ? Y_B : X_B, tokenDir ? X_B : Y_B);
// Perform the first swap on pairA
(uint256 reserve0A, uint256 reserve1A,) = pairA.getReserves();
amountOut = getAmountOut(amountIn, direction ? reserve0A : reserve1A, direction ? reserve1A : reserve0A);
pairA.swap(direction ? 0 : amountOut, direction ? amountOut : 0, address(pairB), new bytes(0));
require(secondAmountOut > amountIn, "Not profitable");
// Perform the second swap on pairB
(uint256 reserve0B, uint256 reserve1B,) = pairB.getReserves();
amountOut = getAmountOut(amountOut, direction ? reserve1B : reserve0B, direction ? reserve0B : reserve1B);
pairB.swap(direction ? amountOut : 0, direction ? 0 : amountOut, msg.sender, new bytes(0));
bytes memory data = abi.encode(pairA, pairB, firstToken, secondToken, amountIn, secondAmountOut);
pairA.swap(tokenDir ? 0 : firstAmountOut, tokenDir ? firstAmountOut : 0, address(this), data);
uint256 profit = secondAmountOut - amountIn;
firstToken.transfer(msg.sender, profit);
return profit;
}
// Ensure that the arbitrage is profitable
require(amountOut > amountIn, "Arbitrage not profitable");
function uniswapV2Call(address sender, uint256 amount0, uint256 amount1, bytes memory data)
public
{
(address pairA, address pairB, address firstToken, address secondToken, uint256 amountIn, uint256 secondAmountOut) = abi.decode(data, (address, address, address, address, uint256, uint256));
bool tokenDir = amount0 == 0;
IERC20(secondToken).transfer(pairB, tokenDir ? amount1 : amount0);
IUniswapV2Pair(pairB).swap(tokenDir ? secondAmountOut : 0, tokenDir ? 0 : secondAmountOut, sender, new bytes(0));
IERC20(firstToken).transfer(pairA, amountIn);
}
fallback() external {
(
address sender,
uint256 amount0,
uint256 amount1,
bytes memory data
) = abi.decode(msg.data[4:], (address, uint256, uint256, bytes));
uniswapV2Call(sender, amount0, amount1, data);
}
}
onchain/src/IUniswapV2Callee.sol (new file): 5 lines

@@ -0,0 +1,5 @@
pragma solidity ^0.8.28;

interface IUniswapV2Callee {
function uniswapV2Call(address sender, uint amount0, uint amount1, bytes calldata data) external;
}
@@ -16,7 +16,6 @@ contract ArbitrageTest is Test {
IUniswapV2Pair sushiswapPair = IUniswapV2Pair(0x397FF1542f962076d0BFE58eA045FfA2d347ACa0);
function setUp() public {
mainnetFork = vm.createFork("https://eth-mainnet.g.alchemy.com/v2/kkDMaLVYpWQA0GsCYNFvAODnAxCCiamv"); // TODO use an env variable
vm.selectFork(mainnetFork);
vm.rollFork(22_147_269);
arbitrageManager = new ArbitrageManager();

@@ -34,12 +33,13 @@ contract ArbitrageTest is Test {
n = 115792089237316195423570985008687907853269984665640564039457584007913129639935;
assertEq(340282366920938463463374607431768211456 - 1, arbitrageManager.sqrt(n)); // +-1 is an acceptable rounding error
}
function test_computeAmountIn() public {
function test_swapUsingOptimum() public {
(uint256 X_A, uint256 Y_A, ) = uniswapPair.getReserves(); // (USDT, WETH)
(uint256 X_B, uint256 Y_B, ) = sushiswapPair.getReserves(); // (USDT, WETH)
console.log("Uniswap pair reserves", X_A, Y_A);
console.log("Sushiswap pair reserves", X_B, Y_B);
console.log("Uniswap pair ratio", Y_A/X_A);
console.log("Sushiswap pair ratio", Y_B/X_B);

@@ -53,6 +53,7 @@ contract ArbitrageTest is Test {
(X_A, Y_A, ) = uniswapPair.getReserves();
(X_B, Y_B, ) = sushiswapPair.getReserves();
console.log("Uniswap pair reserves", X_A, Y_A);
console.log("Sushiswap pair reserves", X_B, Y_B);
console.log("Uniswap pair ratio", Y_A/X_A);
console.log("Sushiswap pair ratio", Y_B/X_B);

@@ -74,4 +75,47 @@ contract ArbitrageTest is Test {
console.log("Uniswap pair ratio", Y_A/X_A);
console.log("Sushiswap pair ratio", Y_B/X_B);
}
function computeGain(uint256 X_A, uint256 Y_A, uint256 X_B, uint256 Y_B, int256 delta)
internal view returns(uint256)
{
uint256 optimum = (delta > 0) ?
arbitrageManager.optimalIn(X_A, Y_A, X_B, Y_B) + uint256(delta)
: arbitrageManager.optimalIn(X_A, Y_A, X_B, Y_B) - uint256(-delta);
uint256 amountOut = arbitrageManager.getAmountOut(optimum, Y_A, X_A);
amountOut = arbitrageManager.getAmountOut(amountOut, X_B, Y_B);
return amountOut - optimum;
}
function test_computeOptimum() public view {
(uint256 X_A, uint256 Y_A, ) = uniswapPair.getReserves(); // (USDT, WETH)
(uint256 X_B, uint256 Y_B, ) = sushiswapPair.getReserves(); // (USDT, WETH)
Y_A -= Y_A / 5; // unbalancing the pair
// Using a delta that is too low (~< 10**8) seems to produce a better gain;
// I believe this has to do with some rounding, it should be negligible
uint256[4] memory deltas = [uint256(0), uint256(10**8), uint256(10**9), uint256(10**10)];
uint256 gain = computeGain(X_A, Y_A, X_B, Y_B, 0);
for (uint256 i; i < deltas.length; i++) {
assertGe(gain, computeGain(X_A, Y_A, X_B, Y_B, int256(deltas[i])), "Computed optimum isn't really optimal");
assertGe(gain, computeGain(X_A, Y_A, X_B, Y_B, -int256(deltas[i])), "Computed optimum isn't really optimal");
}
}
function test_flashArbitrage () public {
uint256 initialWethBalance = weth.balanceOf(address(this));
console.log("initial weth balance", initialWethBalance);
(, uint256 Y_A, ) = uniswapPair.getReserves(); // (USDT, WETH)
uint256 unbalance = Y_A / 5;
vm.prank(address(uniswapPair)); // it works only for the next call
weth.transfer(address(0), unbalance);
uniswapPair.sync();
uint256 profit = arbitrageManager.flashArbitrage(address(uniswapPair), address(sushiswapPair), false);
console.log("profit", profit);
assertEq(initialWethBalance + profit, weth.balanceOf(address(this)), "There was no profit");
}
}
secrets/alchemy_key.age (new file): 5 lines

@@ -0,0 +1,5 @@
age-encryption.org/v1
-> ssh-ed25519 Zh7Kmw 6NFxuvVROgzHIvJPZqniuXinr9XMhNtt4hwW0do9Gio
g8FOQSOHN0xF7QV1fa9lkq62Fim+TQaWWLqGjppn2QE
--- /bcjNPkDej+yknSFozObJz/QAY4fzzVOm6V4iE5BBHc
(binary ciphertext not shown)
secrets/secrets.nix (new file): 7 lines

@@ -0,0 +1,7 @@
let
aciceri = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIm9Sl/I+5G4g4f6iE4oCUJteP58v+wMIew9ZuLB+Gea";
in
{
"alchemy_key.age".publicKeys = [ aciceri ];
"wallet_private_key.age".publicKeys = [ aciceri ];
}
secrets/wallet_private_key.age (new file): 12 lines

@@ -0,0 +1,12 @@
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IHNzaC1lZDI1NTE5IFpoN0ttdyB0VC96
SmZNQjJpMlR3eXU2bzNmK1BYMk5ta3JpSDFCZGpmS2k4R3B6L1NFClJZS055amRO
UEJXL1IvUjN3Mnppbks5emxaUlpoRkhLUEZVRUhwKzFpRUkKLT4gfkwoQUlUJlot
Z3JlYXNlIHojVEdeIF1VSmlVIFxfYApnd1o4SitNK1NKR0dMaDBEUUk4QndKY3hB
YTFTUGtsL0JRWVIzM1lzbmhUUlpxdSs0d3RMd2NQU3Y2ZG9MdHNMCk1rcFFvYzBX
dnVmMjcrcnBFbHdVb0pNbjlObnNtRkx4ZDNYZkRSWWN3dnF3UkxIQ1ptSmJjSGN4
d1BKZgotLS0gdHlBMHpGeGdqdElFUWJZVWVoc0x4MGEvc3lGMzhkUGFHTHlCbkNy
c2JIawp8pD+QIU4hfw8ySNWye098z1ZQSXn267JuzH1oE20GY0ubK2TDWfxUHNht
jBhdgTnVPqQmBX8N0wDeB16AWmC/YuPEz52zZzgZ85Hy61N7E9m5ZDOaBhb1VJpD
Pf9T0uo=
-----END AGE ENCRYPTED FILE-----