From a22d435fe0c2817ff9bda94e1ed0419d6acb5d86 Mon Sep 17 00:00:00 2001 From: josibake Date: Tue, 5 Nov 2024 13:07:51 +0100 Subject: [PATCH 01/44] assumeutxo benchmarking patch introduce two commandline options for assumeutxo, specifically for benchmarking. these commands are: - pausebackgroundsync - an option that lets the user pause the verification of historical blocks in the background -loadutxosnapshot= - load an assumeutxo snapshot on startup, instead of needing to go through the rpc command. the node will shutdown immediately after the snapshot has been loaded this path is not meant for general use and is instead just for making it more ergonomic to use assumeutxo for benchmarking IBD changes. the benefits of using assumeutxo here are we can start from an arbitrary height and sync to chaintip to collect relevant data quickly. using assumeutxo means we can make whatever changes we need to the chainstatedb, since it will be created fresh from the snapshot. note, to use the loadutxosnapshot option, you must first run: ./build/src/bitcoind -stopatheight=1 this makes the node do a header sync and then shut down. this is because assumeutxo will not load a snapshot unless the base block is in the header chain. we could remove this requirement, but this patch is meant to be as minimal as possible, and this also allows us to perform header sync as a preparation commit for a benchmark, which helps keep IBD benchmarks more focused on strictly measuring IBD. next, run: ./build/src/bitcoind -loadutxosnapshot= the node will shutdown after the snapshot is loaded. finally, run: ./build/src/bitcoind -pausebackgroundsync=1 for the actual benchmarking step. this ensures only the sync to chaintip is benchmarked and the load snapshot step is not included in the measurement. 
Co-authored-by: Sjors Provoost <10217+sjors@users.noreply.github.com> --- src/init.cpp | 59 ++++++++++++++++++++++++++++- src/kernel/chainstatemanager_opts.h | 2 + src/node/chainstatemanager_args.cpp | 2 + src/node/chainstatemanager_args.h | 2 + src/validation.cpp | 6 +++ src/validation.h | 4 +- 6 files changed, 71 insertions(+), 4 deletions(-) diff --git a/src/init.cpp b/src/init.cpp index f79ebe881dc0..5dd6fa210add 100644 --- a/src/init.cpp +++ b/src/init.cpp @@ -54,6 +54,7 @@ #include #include #include +#include #include #include #include @@ -140,6 +141,7 @@ using node::VerifyLoadedChainstate; using util::Join; using util::ReplaceAll; using util::ToString; +using node::SnapshotMetadata; static constexpr bool DEFAULT_PROXYRANDOMIZE{true}; static constexpr bool DEFAULT_REST_ENABLE{false}; @@ -158,6 +160,44 @@ static constexpr bool DEFAULT_STOPAFTERBLOCKIMPORT{false}; static constexpr int MIN_CORE_FDS = MIN_LEVELDB_FDS + NUM_FDS_MESSAGE_CAPTURE; static const char* DEFAULT_ASMAP_FILENAME="ip_asn.map"; +bool LoadUTXOSnapshot(NodeContext& node, const fs::path& snapshot_path) { + ChainstateManager& chainman = *node.chainman; + + FILE* file{fsbridge::fopen(snapshot_path, "rb")}; + AutoFile afile{file}; + if (afile.IsNull()) { + LogPrintf("Error: Couldn't open UTXO snapshot file %s for reading\n", snapshot_path.utf8string()); + return false; + } + + SnapshotMetadata metadata{chainman.GetParams().MessageStart()}; + try { + afile >> metadata; + } catch (const std::ios_base::failure& e) { + LogPrintf("Error: Unable to parse snapshot metadata: %s\n", e.what()); + return false; + } + + auto activation_result{chainman.ActivateSnapshot(afile, metadata, false)}; + if (!activation_result) { + LogPrintf("Error: Unable to load UTXO snapshot: %s\n", + util::ErrorString(activation_result).original); + return false; + } + + // Update services to reflect limited peer capabilities during sync + node.connman->RemoveLocalServices(NODE_NETWORK); + 
node.connman->AddLocalServices(NODE_NETWORK_LIMITED); + + CBlockIndex& snapshot_index{*CHECK_NONFATAL(*activation_result)}; + LogPrintf("Loaded UTXO snapshot: coins=%d, height=%d, hash=%s\n", + metadata.m_coins_count, + snapshot_index.nHeight, + snapshot_index.GetBlockHash().ToString()); + + return true; +} + /** * The PID file facilities. */ @@ -495,6 +535,12 @@ void SetupServerArgs(ArgsManager& argsman, bool can_listen_ipc) argsman.AddArg("-minimumchainwork=", strprintf("Minimum work assumed to exist on a valid chain in hex (default: %s, testnet3: %s, testnet4: %s, signet: %s)", defaultChainParams->GetConsensus().nMinimumChainWork.GetHex(), testnetChainParams->GetConsensus().nMinimumChainWork.GetHex(), testnet4ChainParams->GetConsensus().nMinimumChainWork.GetHex(), signetChainParams->GetConsensus().nMinimumChainWork.GetHex()), ArgsManager::ALLOW_ANY | ArgsManager::DEBUG_ONLY, OptionsCategory::OPTIONS); argsman.AddArg("-par=", strprintf("Set the number of script verification threads (0 = auto, up to %d, <0 = leave that many cores free, default: %d)", MAX_SCRIPTCHECK_THREADS, DEFAULT_SCRIPTCHECK_THREADS), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); + argsman.AddArg("-pausebackgroundsync", strprintf("When a UTXO snapshot is loaded, pause the verification of historical blocks in the background (default: %u)", DEFAULT_PAUSE_BACKGROUND_SYNC), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); + gArgs.AddArg("-loadutxosnapshot=", + "Load UTXO set from snapshot file at startup. 
" + "This allows fast synchronization by loading a pre-built UTXO " + "snapshot while the full chain validation happens in background.", + ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); argsman.AddArg("-persistmempool", strprintf("Whether to save the mempool on shutdown and load on restart (default: %u)", DEFAULT_PERSIST_MEMPOOL), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); argsman.AddArg("-persistmempoolv1", strprintf("Whether a mempool.dat file created by -persistmempool or the savemempool RPC will be written in the legacy format " @@ -1660,6 +1706,15 @@ bool AppInitMain(NodeContext& node, interfaces::BlockAndHeaderTipInfo* tip_info) ChainstateManager& chainman = *Assert(node.chainman); + if (args.IsArgSet("-loadutxosnapshot")) { + fs::path snapshot_path = fs::u8path(args.GetArg("-loadutxosnapshot", "")); + snapshot_path = AbsPathForConfigVal(args, snapshot_path); + + if (!LoadUTXOSnapshot(node, snapshot_path)) { + LogPrintf("Failed to load UTXO snapshot from %s", snapshot_path.utf8string()); + } + } + assert(!node.peerman); node.peerman = PeerManager::make(*node.connman, *node.addrman, node.banman.get(), chainman, @@ -1799,7 +1854,9 @@ bool AppInitMain(NodeContext& node, interfaces::BlockAndHeaderTipInfo* tip_info) }); } - if (ShutdownRequested(node)) { + // if loadutxosnapshot is set, we want to load the snapshot then shut down so that only + // syncing to chaintip is benchmarked + if (ShutdownRequested(node) || args.IsArgSet("-loadutxosnapshot")) { return false; } diff --git a/src/kernel/chainstatemanager_opts.h b/src/kernel/chainstatemanager_opts.h index 1b605f3d55df..dee7292b6310 100644 --- a/src/kernel/chainstatemanager_opts.h +++ b/src/kernel/chainstatemanager_opts.h @@ -51,6 +51,8 @@ struct ChainstateManagerOpts { int worker_threads_num{0}; size_t script_execution_cache_bytes{DEFAULT_SCRIPT_EXECUTION_CACHE_BYTES}; size_t signature_cache_bytes{DEFAULT_SIGNATURE_CACHE_BYTES}; + //! 
Whether to defer syncing the background chainstate after an assumeutxo snapshot is loaded + bool pause_background_sync{false}; }; } // namespace kernel diff --git a/src/node/chainstatemanager_args.cpp b/src/node/chainstatemanager_args.cpp index b86d0b299132..683c9cef7a23 100644 --- a/src/node/chainstatemanager_args.cpp +++ b/src/node/chainstatemanager_args.cpp @@ -73,6 +73,8 @@ util::Result ApplyArgsManOptions(const ArgsManager& args, ChainstateManage opts.signature_cache_bytes = clamped_size_each; } + opts.pause_background_sync = args.GetBoolArg("-pausebackgroundsync", DEFAULT_PAUSE_BACKGROUND_SYNC); + return {}; } } // namespace node diff --git a/src/node/chainstatemanager_args.h b/src/node/chainstatemanager_args.h index b2cdba68b8fe..6a7bb27478d6 100644 --- a/src/node/chainstatemanager_args.h +++ b/src/node/chainstatemanager_args.h @@ -14,6 +14,8 @@ class ArgsManager; static constexpr int MAX_SCRIPTCHECK_THREADS{15}; /** -par default (number of script-checking threads, 0 = auto) */ static constexpr int DEFAULT_SCRIPTCHECK_THREADS{0}; +/** -pausebackgroundsync default */ +static const bool DEFAULT_PAUSE_BACKGROUND_SYNC{false}; namespace node { [[nodiscard]] util::Result ApplyArgsManOptions(const ArgsManager& args, ChainstateManager::Options& opts); diff --git a/src/validation.cpp b/src/validation.cpp index 95f3bc58d7d8..a387bcd2dbdf 100644 --- a/src/validation.cpp +++ b/src/validation.cpp @@ -6429,6 +6429,12 @@ void ChainstateManager::RecalculateBestHeader() } } +bool ChainstateManager::BackgroundSyncInProgress() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex()) { + if (!IsUsable(m_snapshot_chainstate.get())) return false; + if (!IsUsable(m_ibd_chainstate.get())) return false; + return !m_options.pause_background_sync; +} + bool ChainstateManager::ValidatedSnapshotCleanup() { AssertLockHeld(::cs_main); diff --git a/src/validation.h b/src/validation.h index 723babca3156..57d885355cd3 100644 --- a/src/validation.h +++ b/src/validation.h @@ -1116,9 +1116,7 @@ class 
ChainstateManager CBlockIndex* ActiveTip() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex()) { return ActiveChain().Tip(); } //! The state of a background sync (for net processing) - bool BackgroundSyncInProgress() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex()) { - return IsUsable(m_snapshot_chainstate.get()) && IsUsable(m_ibd_chainstate.get()); - } + bool BackgroundSyncInProgress() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex()); //! The tip of the background sync chain const CBlockIndex* GetBackgroundSyncTip() const EXCLUSIVE_LOCKS_REQUIRED(GetMutex()) { From 84c08e6b186bdce8048cef3fbea58581bb00e9a6 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Fri, 8 Nov 2024 19:59:00 +0000 Subject: [PATCH 02/44] add shell.nix --- shell.nix | 144 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 144 insertions(+) create mode 100644 shell.nix diff --git a/shell.nix b/shell.nix new file mode 100644 index 000000000000..6a75257493ef --- /dev/null +++ b/shell.nix @@ -0,0 +1,144 @@ +# Copyright 0xB10C, willcl-ark +{ pkgs ? import (fetchTarball "https://github.com/nixos/nixpkgs/archive/nixos-unstable.tar.gz") {}, + spareCores ? 0, + withClang ? false, + withDebug ? 
false, +}: +let + inherit (pkgs.lib) optionals strings; + inherit (pkgs) stdenv; + + # Add mlc binary fetching + mlcBinary = pkgs.fetchurl { + url = "https://github.com/becheran/mlc/releases/download/v0.18.0/mlc-x86_64-linux"; + sha256 = "sha256-jbdp+UlFybBE+o567L398hbcWHsG8aQGqYYf5h9JRkw="; + }; + # Hyperfine + # Included here because we need master for the `--conclude` flag from pr 719 + hyperfine = pkgs.rustPlatform.buildRustPackage rec { + pname = "hyperfine"; + name = "hyperfine"; + version = "e3e86174d9e11dd3a8951990f279c3b85f5fc0b9"; + + src = pkgs.fetchFromGitHub { + owner = "sharkdp"; + repo = "hyperfine"; + rev = version; + sha256 = "sha256-WCc7gURd8dFgUC8moxB7y16e1jNKtImwsfXnqU36IrE="; + }; + + nativeBuildInputs = with pkgs; [ sqlite ]; + + cargoHash = "sha256-E46//75Dgg+XClhD2iV86PYYwEE7bLeYMLK5UkyRpyg="; + + meta = with pkgs.lib; { + description = "A command-line benchmarking tool."; + homepage = "https://github.com/sharkdp/hyperfine"; + license = licenses.mit; + }; + }; + + # Create a derivation for mlc + mlc = pkgs.runCommand "mlc" {} '' + mkdir -p $out/bin + cp ${mlcBinary} $out/bin/mlc + chmod +x $out/bin/mlc + ''; + + binDirs = + [ "\$PWD/build/src" ]; + configureFlags = + [ "--with-boost-libdir=$NIX_BOOST_LIB_DIR" ] + ++ optionals withClang [ "CXX=clang++" "CC=clang" ] + ++ optionals withDebug [ "--enable-debug" ]; + jobs = + if (strings.hasSuffix "linux" builtins.currentSystem) then "$(($(nproc)-${toString spareCores}))" + else if (strings.hasSuffix "darwin" builtins.currentSystem) then "$(($(sysctl -n hw.physicalcpu)-${toString spareCores}))" + else "6"; +in pkgs.mkShell { + nativeBuildInputs = with pkgs; [ + autoconf + automake + libtool + pkg-config + boost + libevent + zeromq + sqlite + clang_18 + + # tests + hexdump + + # compiler output caching per + # https://github.com/bitcoin/bitcoin/blob/master/doc/productivity.md#cache-compilations-with-ccache + ccache + + # for newer cmake building + cmake + + # depends + byacc + + # debugging + gdb 
+ + # tracing + libsystemtap + linuxPackages.bpftrace + linuxPackages.bcc + + ]; + buildInputs = with pkgs; [ + just + bash + + # lint requirements + cargo + git + mlc + ruff + rustc + rustup + shellcheck + python310 + uv + + # Benchmarking + flamegraph + hyperfine + jq + linuxKernel.packages.linux_6_6.perf + perf-tools + ]; + + # Modifies the Nix clang++ wrapper to avoid warning: + # "_FORTIFY_SOURCE requires compiling with optimization (-O)" + hardeningDisable = if withDebug then [ "all" ] else [ ]; + + shellHook = '' + echo "Bitcoin Core build nix-shell" + echo "" + echo "Setting up python venv" + + uv venv --python 3.10 + source .venv/bin/activate + uv pip install -r pyproject.toml + + BCC_EGG=${pkgs.linuxPackages.bcc}/${pkgs.python3.sitePackages}/bcc-${pkgs.linuxPackages.bcc.version}-py3.${pkgs.python3.sourceVersion.minor}.egg + + echo "adding bcc egg to PYTHONPATH: $BCC_EGG" + if [ -f $BCC_EGG ]; then + export PYTHONPATH="$PYTHONPATH:$BCC_EGG" + echo "" + else + echo "The bcc egg $BCC_EGG does not exist. Maybe the python or bcc version is different?" 
+ fi + + echo "adding ${builtins.concatStringsSep ":" binDirs} to \$PATH to make running built binaries more natural" + export PATH=$PATH:${builtins.concatStringsSep ":" binDirs}; + + rustup default stable + rustup component add rustfmt + ''; +} From 0e10d8996c8c3dc7c52de773db16c478da43db27 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Fri, 8 Nov 2024 20:00:10 +0000 Subject: [PATCH 03/44] add uv for python --- pyproject.toml | 13 +++ requirements.txt | 24 +++++ uv.lock | 251 +++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 288 insertions(+) create mode 100644 pyproject.toml create mode 100644 requirements.txt create mode 100644 uv.lock diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000000..51bfa566a5c5 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,13 @@ +[project] +name = "bitcoin-core-deps" +version = "0.1.0" +dependencies = [ + "codespell==2.2.6", + "lief==0.13.2", + "mypy==1.4.1", + "pyzmq==25.1.0", + # Removing in favour of packaged nixpkgs bin which is not dynamically linked + # "ruff==0.5.5", + "vulture==2.6", + "pyperf" +] diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000000..f972af642ae9 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,24 @@ +# This file was autogenerated by uv via the following command: +# uv pip compile pyproject.toml -o requirements.txt +codespell==2.2.6 + # via bitcoin-core-deps (pyproject.toml) +lief==0.13.2 + # via bitcoin-core-deps (pyproject.toml) +mypy==1.4.1 + # via bitcoin-core-deps (pyproject.toml) +mypy-extensions==1.0.0 + # via mypy +psutil==6.1.0 + # via pyperf +pyperf==2.8.0 + # via bitcoin-core-deps (pyproject.toml) +pyzmq==25.1.0 + # via bitcoin-core-deps (pyproject.toml) +toml==0.10.2 + # via vulture +tomli==2.0.2 + # via mypy +typing-extensions==4.12.2 + # via mypy +vulture==2.6 + # via bitcoin-core-deps (pyproject.toml) diff --git a/uv.lock b/uv.lock new file mode 100644 index 000000000000..090e5f1cb4f1 --- /dev/null +++ b/uv.lock @@ 
-0,0 +1,251 @@ +version = 1 +requires-python = ">=3.10" + +[[package]] +name = "bitcoin-core-deps" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "codespell" }, + { name = "lief" }, + { name = "mypy" }, + { name = "pyperf" }, + { name = "pyzmq" }, + { name = "vulture" }, +] + +[package.metadata] +requires-dist = [ + { name = "codespell", specifier = "==2.2.6" }, + { name = "lief", specifier = "==0.13.2" }, + { name = "mypy", specifier = "==1.4.1" }, + { name = "pyperf" }, + { name = "pyzmq", specifier = "==25.1.0" }, + { name = "vulture", specifier = "==2.6" }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, + { url = 
"https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, + { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 }, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, + { url = 
"https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, + { url = 
"https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, + { url = 
"https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 
479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = 
"https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = 
"https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, +] + +[[package]] +name = "codespell" +version = "2.2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/97/df3e00b4d795c96233e35d269c211131c5572503d2270afb6fed7d859cc2/codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9", size = 300968 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/e0/5437cc96b74467c4df6e13b7128cc482c48bb43146fb4c11cf2bcd604e1f/codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07", size = 301382 }, +] + +[[package]] +name = "lief" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/e2/c4125c279eb2a23ecc86cdb188ed06e9d81a9c700e9412f9be866afc2c7d/lief-0.13.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:0390cfaaf0e9aed46bebf26f00f34852768f76bc7f90abf7ceb384566200e5f5", size = 3424746 }, + { url = "https://files.pythonhosted.org/packages/5f/d6/72235d648c6630c37ef52b9f6f4e2f3337842bc4b08c75abcae3052b2c17/lief-0.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5581bf0072c1e7a9ea2fb2e2252b8582016e8b298804b5461e552b402c9cd4e9", size = 3249141 }, + { url = "https://files.pythonhosted.org/packages/d7/cc/9895dff094cad3e88636195640b4b47caefe3d300d3f37b653bd109348df/lief-0.13.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:dbbf2fb3d7807e815f345c77e287da162e081100f059ec03005995befc295d7f", size = 3793938 }, + { url = "https://files.pythonhosted.org/packages/0d/1b/f4bf63bfce187ae210980bdd1a20ea7d8e080381eef09e7d26c585eaa614/lief-0.13.2-cp310-cp310-manylinux_2_24_x86_64.whl", hash = 
"sha256:d344d37334c2b488dc02f04cb13c22cd61aa065eeb9bca7424588e0c8c23bdfb", size = 4045328 }, + { url = "https://files.pythonhosted.org/packages/2c/2a/abac2e42c3cc56f2b5020e58b99f700c4d3236d49451607add0f628d737b/lief-0.13.2-cp310-cp310-win32.whl", hash = "sha256:bc041b28b94139843a33c014e355822a9276b35f3c5ae10d82da56bf572f8222", size = 2493454 }, + { url = "https://files.pythonhosted.org/packages/ed/14/34a12787dc4328227e0e84a97db8142aa1e2b33e0aabc538e93abf7d6e5a/lief-0.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:01d4075bbc3541e9dd3ef008045fa1eb128294a0c5b0c1f69ce60d8948d248c7", size = 3089949 }, + { url = "https://files.pythonhosted.org/packages/2e/95/9d7377095fb7cf195aca8f64d9696705c71884dcba16663472ce17139b9c/lief-0.13.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6570dacebe107ad60c2ba0968d1a865d316009d43cc85af3719d3eeb0911abf3", size = 3424752 }, + { url = "https://files.pythonhosted.org/packages/00/2b/7ac8e15ca198a5c50397aec32102e81ef97fd573a4285ee889ec9084d110/lief-0.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7ce2e3f7c791efba327c2bb3499dbef81e682027109045a9bae696c62e2aeeb0", size = 3249263 }, + { url = "https://files.pythonhosted.org/packages/d6/8d/b50cc4ad91278015e5ac18fc76f32098ed6887c371bef6f4997af4cb97c9/lief-0.13.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:11ab900e0644b6735ecdef2bbd04439b4866a527650fc054470c195d6cfe2917", size = 3792343 }, + { url = "https://files.pythonhosted.org/packages/6b/bd/ea25e9c8ff0a55b5534e5881fa6e5eeca0ed3eeb7c772a276984b8c182d9/lief-0.13.2-cp311-cp311-manylinux_2_24_x86_64.whl", hash = "sha256:042ad2105a136b11a7494b9af8178468e8cb32b8fa2a0a55cb659a5605aeb069", size = 4045112 }, + { url = "https://files.pythonhosted.org/packages/d9/06/ddacd724f65fa8e7eca438c335aa77878a260fbc714cdba252387c33a4cc/lief-0.13.2-cp311-cp311-win32.whl", hash = "sha256:1ce289b6ab3cf4be654270007e8a2c0d2e42116180418c29d3ce83762955de63", size = 2493336 }, + { url = 
"https://files.pythonhosted.org/packages/82/95/1de9a497946fed9d15f847d8a4a0630dfda6d186c044f8731f53d0d3d758/lief-0.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:eccb248ffb598e410fd2ef7c1f171a3cde57a40c9bb8c4fa15d8e7b90eb4eb2d", size = 3090328 }, +] + +[[package]] +name = "mypy" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/28/d8a8233ff167d06108e53b7aefb4a8d7350adbbf9d7abd980f17fdb7a3a6/mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b", size = 2855162 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/3b/1c7363863b56c059f60a1dfdca9ac774a22ba64b7a4da0ee58ee53e5243f/mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8", size = 10451043 }, + { url = "https://files.pythonhosted.org/packages/a7/24/6f0df1874118839db1155fed62a4bd7e80c181367ff8ea07d40fbaffcfb4/mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878", size = 9542079 }, + { url = "https://files.pythonhosted.org/packages/04/5c/deeac94fcccd11aa621e6b350df333e1b809b11443774ea67582cc0205da/mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd", size = 11974913 }, + { url = "https://files.pythonhosted.org/packages/e5/2f/de3c455c54e8cf5e37ea38705c1920f2df470389f8fc051084d2dd8c9c59/mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc", size = 12044492 }, + { url = 
"https://files.pythonhosted.org/packages/e7/d3/6f65357dcb68109946de70cd55bd2e60f10114f387471302f48d54ff5dae/mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1", size = 8831655 }, + { url = "https://files.pythonhosted.org/packages/94/01/e34e37a044325af4d4af9825c15e8a0d26d89b5a9624b4d0908449d3411b/mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462", size = 10338636 }, + { url = "https://files.pythonhosted.org/packages/92/58/ccc0b714ecbd1a64b34d8ce1c38763ff6431de1d82551904ecc3711fbe05/mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258", size = 9444172 }, + { url = "https://files.pythonhosted.org/packages/73/72/dfc0b46e6905eafd598e7c48c0c4f2e232647e4e36547425c64e6c850495/mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2", size = 11855450 }, + { url = "https://files.pythonhosted.org/packages/66/f4/60739a2d336f3adf5628e7c9b920d16e8af6dc078550d615e4ba2a1d7759/mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7", size = 11928679 }, + { url = "https://files.pythonhosted.org/packages/8c/26/6ff2b55bf8b605a4cc898883654c2ca4dd4feedf0bb04ecaacf60d165cde/mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01", size = 8831134 }, + { url = "https://files.pythonhosted.org/packages/3d/9a/e13addb8d652cb068f835ac2746d9d42f85b730092f581bb17e2059c28f1/mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4", size = 2451741 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "psutil" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/10/2a30b13c61e7cf937f4adf90710776b7918ed0a9c434e2c38224732af310/psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a", size = 508565 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/9e/8be43078a171381953cfee33c07c0d628594b5dbfc5157847b85022c2c1b/psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688", size = 247762 }, + { url = "https://files.pythonhosted.org/packages/1d/cb/313e80644ea407f04f6602a9e23096540d9dc1878755f3952ea8d3d104be/psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e", size = 248777 }, + { url = "https://files.pythonhosted.org/packages/65/8e/bcbe2025c587b5d703369b6a75b65d41d1367553da6e3f788aff91eaf5bd/psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38", size = 284259 }, + { url = "https://files.pythonhosted.org/packages/58/4d/8245e6f76a93c98aab285a43ea71ff1b171bcd90c9d238bf81f7021fb233/psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b", size = 287255 }, + { url = "https://files.pythonhosted.org/packages/27/c2/d034856ac47e3b3cdfa9720d0e113902e615f4190d5d1bdb8df4b2015fb2/psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a", size = 288804 }, + { url = "https://files.pythonhosted.org/packages/ea/55/5389ed243c878725feffc0d6a3bc5ef6764312b6fc7c081faaa2cfa7ef37/psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e", size = 250386 }, + { url = "https://files.pythonhosted.org/packages/11/91/87fa6f060e649b1e1a7b19a4f5869709fbf750b7c8c262ee776ec32f3028/psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be", size = 254228 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pyperf" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "psutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/2a/758b3c4cc9843bd385bc595b777345fbf4cd00733b7830cdff43e30002c0/pyperf-2.8.0.tar.gz", hash = "sha256:b30a20465819daf102b6543b512f6799a5a879ff2a123981e6cd732d0e6a7a79", size = 225186 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7f/f7/bb8965520a9b0a3d720b282e67b5cb7f3305b96e4bacaee2794550e67e94/pyperf-2.8.0-py3-none-any.whl", hash = "sha256:1a775b5a09882f18bf876430ef78e07646f773f50774546f5f6a8b34d60e3968", size = 142508 }, +] + +[[package]] +name = "pyzmq" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "implementation_name == 'pypy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/9c/2b2614b0b86ff703b3a33ea5e044923bd7d100adc8c829d579a9b71ea9e7/pyzmq-25.1.0.tar.gz", hash = "sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957", size = 1224640 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/17/6a70f84b79e361af34f6c99064ecf9e87112c4c48b9c7ea78f8e680b57d8/pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d", size = 1826810 }, + { url = "https://files.pythonhosted.org/packages/2f/53/fc7dbdd32e275aee0961e2a5bed1bb64223846f959fd6e0c9a39aab08eed/pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101", size = 1236489 }, + { url = "https://files.pythonhosted.org/packages/04/0b/bff5b6c1680e248bad2df8248a060645709fe2aef9689e9f7c81c587bad4/pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc", size = 864304 }, + { url = "https://files.pythonhosted.org/packages/5e/9e/32074bd8bcf2a5cf282d8817458fd5479c68b487b6c3a5d4627711ad38f5/pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f", size = 1116061 }, + { url = "https://files.pythonhosted.org/packages/fa/fb/a114ba641eb873c165106d3c8ee75eb49d6ea3204168808708d866de360d/pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89", size = 1065090 }, + { url = "https://files.pythonhosted.org/packages/ca/db/f9976803f1a660e753d0f2426065975bad5db8272fd5284efaf488dc0ce1/pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3", size = 1062464 }, + { url = "https://files.pythonhosted.org/packages/94/3a/c3964c0a86c3535ae240799d3b7c8e13527e7a092080dda9012b1401fa86/pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9", size = 1391159 }, + { url = "https://files.pythonhosted.org/packages/a1/87/92556ffa8fbe7dc497d847e39d5c46134f9ad047b23f5bcefc8fbd0c2c9c/pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80", size = 1721009 }, + { url = "https://files.pythonhosted.org/packages/66/96/129706be681649f43bde93811416f566acfefcd3fb18156d5df349c360ab/pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c", size = 1611290 }, + { url = "https://files.pythonhosted.org/packages/64/db/e19f69fe9b1a4e53f6382274f553358e2e7305d2a2b9d9db36087bf52d5e/pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1", size = 880070 }, + { url = "https://files.pythonhosted.org/packages/32/e4/ce4f94009f84c2a688082c2674d490d2e20e0c9058087f5358a2bf29ddf1/pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba", size = 1137827 }, + { url = "https://files.pythonhosted.org/packages/bb/80/ae792378f98d6d0e39c975c334603d3d2535f7897707fe91f31d37f94fdb/pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d", size = 1816147 }, + { url = 
"https://files.pythonhosted.org/packages/5a/b6/3c2ddd09aa24352e4f6aade53e9b9a1816c0774c844f11b1a2f508ddc0be/pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a", size = 1230845 }, + { url = "https://files.pythonhosted.org/packages/26/bb/80535157e8811095901f98688839092afb6dcaf2ff154aa8fa2e575f540d/pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883", size = 866042 }, + { url = "https://files.pythonhosted.org/packages/7c/65/bccec1eae7c0e089d90648f350e6c2ff40ccb8c6d1b929548f4cd304b1f7/pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b", size = 1116285 }, + { url = "https://files.pythonhosted.org/packages/b7/cb/2a36d3eed310efb342fbb7b4adf6b05f46401c4b937154bd1c9b703314e0/pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5", size = 1066280 }, + { url = "https://files.pythonhosted.org/packages/66/f5/15db4c297957f049cd4dcd35eb7fbe9098a72489e0abdb289c529d7327cc/pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc", size = 1061673 }, + { url = "https://files.pythonhosted.org/packages/fa/40/7729719e38324e5e9f2e77f6131fc253f063a3741eab170ef610196098e8/pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994", size = 1393337 }, + { url = "https://files.pythonhosted.org/packages/fd/12/0324dcb2554cd3f2ebb851ddbfbac27c4bb384394ba4a8978dec093fe71d/pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c", size = 1723679 }, + { url = 
"https://files.pythonhosted.org/packages/04/15/b8ab292f0b74e0440547185fb67167c87454a2b3be429d64de569f7142a2/pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a", size = 1612761 }, + { url = "https://files.pythonhosted.org/packages/22/3e/3670e36c6f42e124492ddd2af550ca13bd4a9f1edd562e1ae7c35a1f230b/pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425", size = 878704 }, + { url = "https://files.pythonhosted.org/packages/a0/db/4e586c563b48dec09b8f7c2728b905e29db61af89b5c58e4eba9ad36fdec/pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6", size = 1135692 }, +] + +[[package]] +name = "toml" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, +] + +[[package]] +name = "tomli" +version = "2.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/b9/de2a5c0144d7d75a57ff355c0c24054f965b2dc3036456ae03a51ea6264b/tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed", size = 16096 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/db/ce8eda256fa131af12e0a76d481711abe4681b6923c27efb9a255c9e4594/tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38", size = 13237 }, +] + 
+[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "vulture" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "toml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/18/e51a6e575047d19dbcd7394f05b2afa6191fe9ce30bd5bcfb3f850501e0c/vulture-2.6.tar.gz", hash = "sha256:2515fa848181001dc8a73aba6a01a1a17406f5d372f24ec7f7191866f9f4997e", size = 53777 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/9d/3c4df0c704ddb5ecf07fcd92cfe6d4a5dc000b7f5459afcb7e98a2ffea1e/vulture-2.6-py2.py3-none-any.whl", hash = "sha256:e792e903ccc063ec4873a8979dcf11b51ea3d65a2d3b31c113d47be48f0cdcae", size = 26494 }, +] From 87f028e29bcdac17d57de41022f445b190f2bb86 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Fri, 8 Nov 2024 20:00:18 +0000 Subject: [PATCH 04/44] add justfile --- justfile | 78 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 justfile diff --git a/justfile b/justfile new file mode 100644 index 000000000000..8eae95d1401a --- /dev/null +++ b/justfile @@ -0,0 +1,78 @@ +set shell := ["bash", "-uc"] + +os := os() + +default: + just --list + +# Build default project +[group('build')] +build *args: clean + cmake -B build {{ args }} + cmake --build build -j {{ num_cpus() }} + +# Build with all optional modules +[group('build')] +build-dev 
*args: clean + cmake -B build --preset dev-mode {{ args }} + cmake --build build -j {{ num_cpus() }} + +# Build for the CI, including bench_bitcoin +[private] +[group('ci')] +build-ci: clean + cmake -B build -DBUILD_BENCH=ON -DCMAKE_BUILD_TYPE=RelWithDebInfo -DAPPEND_CPPFLAGS="-fno-omit-frame-pointer" + cmake --build build -j {{ num_cpus() }} + +# Re-build current config +[group('build')] +rebuild: + cmake --build build -j {{ num_cpus() }} + +# Clean build dir using git clean -dfx +[group('build')] +clean: + git clean -dfx + +# Run unit tests +[group('test')] +test-unit: + ctest --test-dir build -j {{ num_cpus() }} + +# Run all functional tests +[group('test')] +test-func: + build/test/functional/test_runner.py -j {{ num_cpus() }} + +# Run all unit and functional tests +[group('test')] +test: test-unit test-func + +# Run a single functional test (filename.py) +[group('test')] +test-func1 test: + build/test/functional/test_runner.py {{ test }} + +# Run a single unit test suite +[group('test')] +test-unit1 suite: + build/src/test/test_bitcoin --log_level=all --run_test={{ suite }} + +# Run benchmarks +[group('perf')] +bench: + build/src/bench/bench_bitcoin + +# Run the lint job +lint: + #!/usr/bin/env bash + cd test/lint/test_runner/ + cargo fmt + cargo clippy + export COMMIT_RANGE="$( git rev-list --max-count=1 --merges HEAD )..HEAD" + RUST_BACKTRACE=1 cargo run + +# Run assumeutxo CI workflow +[group('ci')] +run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file: + ./bench-ci/run-assumeutxo-signet-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} From 0dca1417990f9ab7e6b3197cd144dcea51886c77 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Fri, 8 Nov 2024 20:00:29 +0000 Subject: [PATCH 05/44] add benchmarking ci workflows Co-authored-by: David Gumberg --- .github/workflows/ci.yml | 326 ++++---------------------- .github/workflows/publish-results.yml | 256 ++++++++++++++++++++ 
bench-ci/run-assumeutxo-bench.sh | 133 +++++++++++ justfile | 11 +- 4 files changed, 442 insertions(+), 284 deletions(-) create mode 100644 .github/workflows/publish-results.yml create mode 100755 bench-ci/run-assumeutxo-bench.sh diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 740d31ae5683..17d96b294337 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,302 +1,66 @@ -# Copyright (c) 2023 The Bitcoin Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. - name: CI on: - # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request. pull_request: - # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#push. - push: branches: - - '**' - tags-ignore: - - '**' - -concurrency: - group: ${{ github.event_name != 'pull_request' && github.run_id || github.ref }} - cancel-in-progress: true - -env: - CI_FAILFAST_TEST_LEAVE_DANGLING: 1 # GHA does not care about dangling processes and setting this variable avoids killing the CI script itself on error - MAKEJOBS: '-j10' - + - master jobs: - test-each-commit: - name: 'test each commit' - runs-on: ubuntu-24.04 - if: github.event_name == 'pull_request' && github.event.pull_request.commits != 1 - timeout-minutes: 360 # Use maximum time, see https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idtimeout-minutes. Assuming a worst case time of 1 hour per commit, this leads to a --max-count=6 below. 
- env: - MAX_COUNT: 6 - steps: - - name: Determine fetch depth - run: echo "FETCH_DEPTH=$((${{ github.event.pull_request.commits }} + 2))" >> "$GITHUB_ENV" - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - fetch-depth: ${{ env.FETCH_DEPTH }} - - name: Determine commit range - run: | - # Checkout HEAD~ and find the test base commit - # Checkout HEAD~ because it would be wasteful to rerun tests on the PR - # head commit that are already run by other jobs. - git checkout HEAD~ - # Figure out test base commit by listing ancestors of HEAD, excluding - # ancestors of the most recent merge commit, limiting the list to the - # newest MAX_COUNT ancestors, ordering it from oldest to newest, and - # taking the first one. - # - # If the branch contains up to MAX_COUNT ancestor commits after the - # most recent merge commit, all of those commits will be tested. If it - # contains more, only the most recent MAX_COUNT commits will be - # tested. - # - # In the command below, the ^@ suffix is used to refer to all parents - # of the merge commit as described in: - # https://git-scm.com/docs/git-rev-parse#_other_rev_parent_shorthand_notations - # and the ^ prefix is used to exclude these parents and all their - # ancestors from the rev-list output as described in: - # https://git-scm.com/docs/git-rev-list - MERGE_BASE=$(git rev-list -n1 --merges HEAD) - EXCLUDE_MERGE_BASE_ANCESTORS= - # MERGE_BASE can be empty due to limited fetch-depth - if test -n "$MERGE_BASE"; then - EXCLUDE_MERGE_BASE_ANCESTORS=^${MERGE_BASE}^@ - fi - echo "TEST_BASE=$(git rev-list -n$((${{ env.MAX_COUNT }} + 1)) --reverse HEAD $EXCLUDE_MERGE_BASE_ANCESTORS | head -1)" >> "$GITHUB_ENV" - - run: | - sudo apt-get update - sudo apt-get install clang ccache build-essential cmake pkg-config python3-zmq libevent-dev libboost-dev libsqlite3-dev libdb++-dev systemtap-sdt-dev libzmq3-dev qtbase5-dev qttools5-dev qttools5-dev-tools qtwayland5 libqrencode-dev -y - - name: Compile and 
run tests - run: | - # Run tests on commits after the last merge commit and before the PR head commit - # Use clang++, because it is a bit faster and uses less memory than g++ - git rebase --exec "echo Running test-one-commit on \$( git log -1 ) && CC=clang CXX=clang++ cmake -B build -DWERROR=ON -DWITH_ZMQ=ON -DBUILD_GUI=ON -DBUILD_BENCH=ON -DBUILD_FUZZ_BINARY=ON -DWITH_BDB=ON -DWITH_USDT=ON -DCMAKE_CXX_FLAGS='-Wno-error=unused-member-function' && cmake --build build -j $(nproc) && ctest --output-on-failure --stop-on-failure --test-dir build -j $(nproc) && ./build/test/functional/test_runner.py -j $(( $(nproc) * 2 ))" ${{ env.TEST_BASE }} - - macos-native-arm64: - name: ${{ matrix.job-name }} - # Use latest image, but hardcode version to avoid silent upgrades (and breaks). - # See: https://github.com/actions/runner-images#available-images. - runs-on: macos-14 - - # When a contributor maintains a fork of the repo, any pull request they make - # to their own fork, or to the main repository, will trigger two CI runs: - # one for the branch push and one for the pull request. - # This can be avoided by setting SKIP_BRANCH_PUSH=true as a custom env variable - # in Github repository settings. 
- if: ${{ vars.SKIP_BRANCH_PUSH != 'true' || github.event_name == 'pull_request' }} - - timeout-minutes: 120 - + assumeutxo: strategy: - fail-fast: false matrix: - job-type: [standard, fuzz] include: - - job-type: standard - file-env: './ci/test/00_setup_env_mac_native.sh' - job-name: 'macOS 14 native, arm64, no depends, sqlite only, gui' - - job-type: fuzz - file-env: './ci/test/00_setup_env_mac_native_fuzz.sh' - job-name: 'macOS 14 native, arm64, fuzz' - + - network: signet + timeout: 20 + utxo_path: /var/lib/bitcoin/utxo-signet-160000.dat + - network: mainnet + timeout: 600 + utxo_path: /var/lib/bitcoin/utxo-840000.dat + runs-on: [self-hosted, linux, x64] + timeout-minutes: ${{ matrix.timeout }} env: - DANGER_RUN_CI_ON_HOST: 1 - BASE_ROOT_DIR: ${{ github.workspace }} - + NIX_PATH: nixpkgs=channel:nixos-unstable + UTXO_PATH: ${{ matrix.utxo_path }} + BASE_SHA: ${{ github.event.pull_request.base.sha }} steps: - - name: Checkout + - name: Checkout repo uses: actions/checkout@v4 - - - name: Clang version - run: | - sudo xcode-select --switch /Applications/Xcode_15.0.app - clang --version - - - name: Install Homebrew packages - env: - HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 + with: + fetch-depth: 1 + - name: Fetch base commit run: | - # A workaround for "The `brew link` step did not complete successfully" error. 
- brew install --quiet python@3 || brew link --overwrite python@3 - brew install --quiet coreutils ninja pkgconf gnu-getopt ccache boost libevent zeromq qt@5 qrencode - - - name: Set Ccache directory - run: echo "CCACHE_DIR=${RUNNER_TEMP}/ccache_dir" >> "$GITHUB_ENV" + # Export the commit hash for use in later steps + echo "CHECKOUT_COMMIT=$(git rev-parse HEAD)" >> "$GITHUB_ENV" - - name: Restore Ccache cache - id: ccache-cache - uses: actions/cache/restore@v4 + # Fetch down to the base SHA for the base benchmark comparison as we + # only checked out to depth 1 + git fetch --depth=1 origin ${{ github.event.pull_request.base.sha }} + - uses: cachix/install-nix-action@v27 with: - path: ${{ env.CCACHE_DIR }} - key: ${{ github.job }}-${{ matrix.job-type }}-ccache-${{ github.run_id }} - restore-keys: ${{ github.job }}-${{ matrix.job-type }}-ccache- - - - name: CI script - run: ./ci/test_run_all.sh + nix_path: $NIX_PATH + - name: Run AssumeUTXO ${{ matrix.network }} env: - FILE_ENV: ${{ matrix.file-env }} - - - name: Save Ccache cache - uses: actions/cache/save@v4 - if: github.event_name != 'pull_request' && steps.ccache-cache.outputs.cache-hit != 'true' - with: - path: ${{ env.CCACHE_DIR }} - # https://github.com/actions/cache/blob/main/tips-and-workarounds.md#update-a-cache - key: ${{ github.job }}-${{ matrix.job-type }}-ccache-${{ github.run_id }} - - win64-native: - name: ${{ matrix.job-name }} - # Use latest image, but hardcode version to avoid silent upgrades (and breaks). - # See: https://github.com/actions/runner-images#available-images. 
- runs-on: windows-2022 - - if: ${{ vars.SKIP_BRANCH_PUSH != 'true' || github.event_name == 'pull_request' }} - - env: - PYTHONUTF8: 1 - TEST_RUNNER_TIMEOUT_FACTOR: 40 - - strategy: - fail-fast: false - matrix: - job-type: [standard, fuzz] - include: - - job-type: standard - generate-options: '-DBUILD_GUI=ON -DWITH_BDB=ON -DWITH_ZMQ=ON -DBUILD_BENCH=ON -DWERROR=ON' - job-name: 'Win64 native, VS 2022' - - job-type: fuzz - generate-options: '-DVCPKG_MANIFEST_NO_DEFAULT_FEATURES=ON -DVCPKG_MANIFEST_FEATURES="sqlite" -DBUILD_GUI=OFF -DBUILD_FOR_FUZZING=ON -DWERROR=ON' - job-name: 'Win64 native fuzz, VS 2022' - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Configure Developer Command Prompt for Microsoft Visual C++ - # Using microsoft/setup-msbuild is not enough. - uses: ilammy/msvc-dev-cmd@v1 - with: - arch: x64 - - - name: Get tool information + TMP_DATADIR: "${{ runner.temp }}/base_datadir" run: | - cmake -version | Tee-Object -FilePath "cmake_version" - Write-Output "---" - msbuild -version | Tee-Object -FilePath "msbuild_version" - $env:VCToolsVersion | Tee-Object -FilePath "toolset_version" - py -3 --version - Write-Host "PowerShell version $($PSVersionTable.PSVersion.ToString())" - - - name: Using vcpkg with MSBuild - run: | - Set-Location "$env:VCPKG_INSTALLATION_ROOT" - Add-Content -Path "triplets\x64-windows.cmake" -Value "set(VCPKG_BUILD_TYPE release)" - Add-Content -Path "triplets\x64-windows-static.cmake" -Value "set(VCPKG_BUILD_TYPE release)" - - - name: vcpkg tools cache - uses: actions/cache@v4 + env + mkdir -p "$TMP_DATADIR" + nix-shell --command "just run-assumeutxo-${{ matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json" + - uses: actions/upload-artifact@v4 with: - path: C:/vcpkg/downloads/tools - key: ${{ github.job }}-vcpkg-tools - - - name: Restore vcpkg binary cache - uses: actions/cache/restore@v4 - id: vcpkg-binary-cache + name: result-${{ matrix.network }} + path: "${{ 
runner.temp }}/results.json" + - uses: actions/upload-artifact@v4 with: - path: ~/AppData/Local/vcpkg/archives - key: ${{ github.job }}-vcpkg-binary-${{ hashFiles('cmake_version', 'msbuild_version', 'toolset_version', 'vcpkg.json') }} - - - name: Generate build system - run: | - cmake -B build --preset vs2022-static -DCMAKE_TOOLCHAIN_FILE="$env:VCPKG_INSTALLATION_ROOT\scripts\buildsystems\vcpkg.cmake" ${{ matrix.generate-options }} - - - name: Save vcpkg binary cache - uses: actions/cache/save@v4 - if: github.event_name != 'pull_request' && steps.vcpkg-binary-cache.outputs.cache-hit != 'true' && matrix.job-type == 'standard' - with: - path: ~/AppData/Local/vcpkg/archives - key: ${{ github.job }}-vcpkg-binary-${{ hashFiles('cmake_version', 'msbuild_version', 'toolset_version', 'vcpkg.json') }} - - - name: Build - working-directory: build - run: | - cmake --build . -j $env:NUMBER_OF_PROCESSORS --config Release - - - name: Run test suite - if: matrix.job-type == 'standard' - working-directory: build - run: | - ctest --output-on-failure --stop-on-failure -j $env:NUMBER_OF_PROCESSORS -C Release - - - name: Run functional tests - if: matrix.job-type == 'standard' - working-directory: build - env: - BITCOIND: '${{ github.workspace }}\build\src\Release\bitcoind.exe' - BITCOINCLI: '${{ github.workspace }}\build\src\Release\bitcoin-cli.exe' - BITCOINUTIL: '${{ github.workspace }}\build\src\Release\bitcoin-util.exe' - BITCOINWALLET: '${{ github.workspace }}\build\src\Release\bitcoin-wallet.exe' - TEST_RUNNER_EXTRA: ${{ github.event_name != 'pull_request' && '--extended' || '' }} - shell: cmd - run: py -3 test\functional\test_runner.py --jobs %NUMBER_OF_PROCESSORS% --ci --quiet --tmpdirprefix=%RUNNER_TEMP% --combinedlogslen=99999999 --timeout-factor=%TEST_RUNNER_TIMEOUT_FACTOR% %TEST_RUNNER_EXTRA% - - - name: Clone corpora - if: matrix.job-type == 'fuzz' - run: | - git clone --depth=1 https://github.com/bitcoin-core/qa-assets "$env:RUNNER_TEMP\qa-assets" - Set-Location 
"$env:RUNNER_TEMP\qa-assets" - Write-Host "Using qa-assets repo from commit ..." - git log -1 - - - name: Run fuzz tests - if: matrix.job-type == 'fuzz' - working-directory: build + name: flamegraph-${{ matrix.network }} + path: "**/*-flamegraph.html" + - name: Write GitHub and runner context files env: - BITCOINFUZZ: '${{ github.workspace }}\build\src\test\fuzz\Release\fuzz.exe' - shell: cmd + GITHUB_CONTEXT: ${{ toJSON(github) }} + RUNNER_CONTEXT: ${{ toJSON(runner) }} run: | - py -3 test\fuzz\test_runner.py --par %NUMBER_OF_PROCESSORS% --loglevel DEBUG %RUNNER_TEMP%\qa-assets\fuzz_corpora - - asan-lsan-ubsan-integer-no-depends-usdt: - name: 'ASan + LSan + UBSan + integer, no depends, USDT' - runs-on: ubuntu-24.04 # has to match container in ci/test/00_setup_env_native_asan.sh for tracing tools - if: ${{ vars.SKIP_BRANCH_PUSH != 'true' || github.event_name == 'pull_request' }} - timeout-minutes: 120 - env: - FILE_ENV: "./ci/test/00_setup_env_native_asan.sh" - DANGER_CI_ON_HOST_CACHE_FOLDERS: 1 - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Set Ccache directory - run: echo "CCACHE_DIR=${RUNNER_TEMP}/ccache_dir" >> "$GITHUB_ENV" - - - name: Set base root directory - run: echo "BASE_ROOT_DIR=${RUNNER_TEMP}" >> "$GITHUB_ENV" - - - name: Restore Ccache cache - id: ccache-cache - uses: actions/cache/restore@v4 - with: - path: ${{ env.CCACHE_DIR }} - key: ${{ github.job }}-ccache-${{ github.run_id }} - restore-keys: ${{ github.job }}-ccache- - - - name: Enable bpfcc script - # In the image build step, no external environment variables are available, - # so any settings will need to be written to the settings env file: - run: sed -i "s|\${INSTALL_BCC_TRACING_TOOLS}|true|g" ./ci/test/00_setup_env_native_asan.sh - - - name: CI script - run: ./ci/test_run_all.sh - - - name: Save Ccache cache - uses: actions/cache/save@v4 - if: github.event_name != 'pull_request' && steps.ccache-cache.outputs.cache-hit != 'true' + mkdir contexts + echo "$GITHUB_CONTEXT" 
| nix-shell -p jq --command "jq 'del(.token)' > contexts/github.json" + echo "$RUNNER_CONTEXT" > contexts/runner.json + - name: Upload context metadata as artifact + uses: actions/upload-artifact@v4 with: - path: ${{ env.CCACHE_DIR }} - # https://github.com/actions/cache/blob/main/tips-and-workarounds.md#update-a-cache - key: ${{ github.job }}-ccache-${{ github.run_id }} + name: run-metadata-${{ matrix.network }} + path: ./contexts/ diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml new file mode 100644 index 000000000000..8b4b3577a2c9 --- /dev/null +++ b/.github/workflows/publish-results.yml @@ -0,0 +1,256 @@ +name: Publish Results +on: + workflow_run: + workflows: ["CI"] + types: [completed] +jobs: + build: + runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} + permissions: + actions: read + contents: write + checks: read + env: + NETWORKS: "signet,mainnet" + steps: + - uses: actions/checkout@v4 + with: + ref: gh-pages + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + github-token: ${{ secrets.GH_PAT }} + run-id: ${{ github.event.workflow_run.id }} + - name: Extract artifacts + run: | + for network in ${NETWORKS//,/ }; do + if [ -d "result-${network}" ]; then + mkdir -p "${network}-results" + mv "result-${network}/results.json" "${network}-results/" + fi + + if [ -d "flamegraph-${network}" ]; then + mkdir -p "${network}-flamegraph" + mv "flamegraph-${network}"/* "${network}-flamegraph/" + fi + + if [ -d "run-metadata-${network}" ]; then + mkdir -p "${network}-metadata" + mv "run-metadata-${network}"/* "${network}-metadata/" + fi + done + - name: Organize results + id: organize + uses: actions/github-script@v7 + with: + script: | + const fs = require('fs'); + const networks = process.env.NETWORKS.split(','); + let prNumber = 'main'; + let runId; + + // First, extract metadata and get PR number + for (const network of networks) { + if 
(fs.existsSync(`${network}-metadata/github.json`)) { + const metadata = JSON.parse(fs.readFileSync(`${network}-metadata/github.json`, 'utf8')); + prNumber = metadata.event.pull_request?.number || prNumber; + runId = metadata.run_id; + } + } + + if (!runId) { + console.error('No valid metadata found for any network'); + process.exit(1); + } + + // Create directory structure + const resultDir = `results/pr-${prNumber}/${runId}`; + fs.mkdirSync(resultDir, { recursive: true }); + + // Now copy metadata files + for (const network of networks) { + if (fs.existsSync(`${network}-metadata/github.json`)) { + const metadataDir = `${resultDir}/${network}-metadata`; + fs.mkdirSync(metadataDir, { recursive: true }); + fs.copyFileSync(`${network}-metadata/github.json`, `${metadataDir}/github.json`); + } + } + + // Process each network's results + const combinedResults = { + results: [] + }; + + for (const network of networks) { + if (fs.existsSync(`${network}-results`)) { + const networkResults = JSON.parse(fs.readFileSync(`${network}-results/results.json`, 'utf8')); + + // Add network name to each result + networkResults.results.forEach(result => { + result.network = network; + combinedResults.results.push(result); + }); + + // Move flamegraphs + if (fs.existsSync(`${network}-flamegraph`)) { + fs.readdirSync(`${network}-flamegraph`).forEach(file => { + const sourceFile = `${network}-flamegraph/${file}`; + const targetFile = `${resultDir}/${network}-${file}`; + fs.copyFileSync(sourceFile, targetFile); + }); + } + } + } + + // Write combined results + fs.writeFileSync(`${resultDir}/results.json`, JSON.stringify(combinedResults, null, 2)); + + // Create index.html for this run + const indexHtml = ` + + + Benchmark Results + + + +
+

Benchmark Results

+
+

PR #${prNumber} - Run ${runId}

+ ${networks.map(network => ` +
+

${network} Results

+
+ ${combinedResults.results + .filter(result => result.network === network) + .map(result => { + const commitShortId = result.parameters.commit.slice(0, 8); + const flameGraphFile = `${network}-${result.parameters.commit}-flamegraph.html`; + const flameGraphPath = `${resultDir}/${flameGraphFile}`; + + return ` + + + + + + + + + + + + + + + + + + + + + +
BranchCommandMean (s)Std DevUser (s)System (s)
+ ${commitShortId} + ${result.command}${result.mean.toFixed(3)}${result.stddev?.toFixed(3) || 'N/A'}${result.user.toFixed(3)}${result.system.toFixed(3)}
+ + ${fs.existsSync(flameGraphPath) ? ` + + ` : ''} + `; + }).join('')} +
+
+ `).join('')} +
+
+ + `; + + fs.writeFileSync(`${resultDir}/index.html`, indexHtml); + + // Update main index.html + const prs = fs.readdirSync('results') + .filter(dir => dir.startsWith('pr-')) + .map(dir => ({ + pr: dir.replace('pr-', ''), + runs: fs.readdirSync(`results/${dir}`) + })); + + const mainIndexHtml = ` + + + Bitcoin Benchmark Results + + + +
+

Bitcoin Benchmark Results

+
+

Available Results

+
    + ${prs.map(({pr, runs}) => ` +
  • PR #${pr} +
      + ${runs.map(run => ` +
    • Run ${run}
    • + `).join('')} +
    +
  • + `).join('')} +
+
+
+ + `; + + fs.writeFileSync('index.html', mainIndexHtml); + + // Return the URL for the PR comment + const resultUrl = `https://${context.repo.owner}.github.io/${context.repo.name}/results/pr-${prNumber}/${runId}/index.html`; + core.setOutput('result-url', resultUrl); + return resultUrl; + - name: Upload Pages artifact + uses: actions/upload-pages-artifact@v3 + with: + path: results + - name: Commit and push to gh-pages + run: | + git config --global user.name 'github-actions[bot]' + git config --global user.email 'github-actions[bot]@users.noreply.github.com' + git add results/ + git add index.html + git commit -m "Update benchmark results from run ${{ github.event.workflow_run.id }}" + git push origin gh-pages + comment-pr: + needs: build + runs-on: ubuntu-latest + permissions: + pull-requests: write + actions: read + steps: + - name: Download metadata artifact + uses: actions/download-artifact@v4 + with: + pattern: run-metadata-* + github-token: ${{ secrets.GITHUB_TOKEN }} + run-id: ${{ github.event.workflow_run.id }} + path: metadata + - name: Parse Pull Request Number + id: parse-pr + run: | + # Find the first github.json file in any of the metadata subdirectories + metadata_file=$(find metadata -name github.json | head -n1) + if [ -n "$metadata_file" ]; then + pr_number=$(jq -r '.event.pull_request.number' "$metadata_file") + echo "PR_NUMBER=$pr_number" >> "$GITHUB_ENV" + fi + - name: Comment on PR + if: ${{ env.PR_NUMBER }} + uses: thollander/actions-comment-pull-request@v3.0.1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + pr-number: ${{ env.PR_NUMBER }} + message: | + 📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${{ env.PR_NUMBER }}/${{ github.event.workflow_run.id }}/index.html after the github pages "build and deployment" action has completed. 
diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh new file mode 100755 index 000000000000..5f6dd2a42229 --- /dev/null +++ b/bench-ci/run-assumeutxo-bench.sh @@ -0,0 +1,133 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +# Helper function to check and clean datadir +clean_datadir() { + set -euxo pipefail + + local TMP_DATADIR="$1" + + # Create the directory if it doesn't exist + mkdir -p "${TMP_DATADIR}" + + # If we're in CI, clean without confirmation + if [ -n "${CI:-}" ]; then + rm -Rf "${TMP_DATADIR:?}"/* + else + read -rp "Are you sure you want to delete everything in ${TMP_DATADIR}? [y/N] " response + if [[ "$response" =~ ^[Yy]$ ]]; then + rm -Rf "${TMP_DATADIR:?}"/* + else + echo "Aborting..." + exit 1 + fi + fi +} + +# Helper function to clear logs +clean_logs() { + set -euxo pipefail + + local TMP_DATADIR="$1" + local logfile="${TMP_DATADIR}/debug.log" + + echo "Checking for ${logfile}" + if [ -e "{$logfile}" ]; then + echo "Removing ${logfile}" + rm "${logfile}" + fi +} + +# Execute CMD before each set of timing runs. +setup_assumeutxo_snapshot_run() { + set -euxo pipefail + + local commit="$1" + local TMP_DATADIR="$2" + + git checkout "${commit}" + # Build for CI without bench_bitcoin + cmake -B build -DBUILD_BENCH=OFF -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" + cmake --build build -j "$(nproc)" + clean_datadir "${TMP_DATADIR}" +} + +# Execute CMD before each timing run. 
+prepare_assumeutxo_snapshot_run() { + set -euxo pipefail + + local TMP_DATADIR="$1" + local UTXO_PATH="$2" + local CONNECT_ADDRESS="$3" + local chain="$4" + + # Run the actual preparation steps + clean_datadir "${TMP_DATADIR}" + build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${chain}" -stopatheight=1 + build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${chain}" -dbcache=16000 -pausebackgroundsync=1 -loadutxosnapshot="${UTXO_PATH}" || true + clean_logs "${TMP_DATADIR}" +} + +# Executed after each timing run +conclude_assumeutxo_snapshot_run() { + set -euxo pipefail + + local commit="$1" + + if [ -e flamegraph.html ]; then + mv flamegraph.html "${commit}"-flamegraph.html + fi +} + +# Execute CMD after the completion of all benchmarking runs for each individual +# command to be benchmarked. +cleanup_assumeutxo_snapshot_run() { + set -euxo pipefail + + local TMP_DATADIR="$1" + + # Clean up the datadir + clean_datadir "${TMP_DATADIR}" +} + +run_benchmark() { + local base_commit="$1" + local head_commit="$2" + local TMP_DATADIR="$3" + local UTXO_PATH="$4" + local results_file="$5" + local chain="$6" + local stop_at_height="$7" + local connect_address="$8" + + # Export functions so they can be used by hyperfine + export -f setup_assumeutxo_snapshot_run + export -f prepare_assumeutxo_snapshot_run + export -f conclude_assumeutxo_snapshot_run + export -f cleanup_assumeutxo_snapshot_run + export -f clean_datadir + export -f clean_logs + + # Run hyperfine + hyperfine \ + --setup "setup_assumeutxo_snapshot_run {commit} ${TMP_DATADIR}" \ + --prepare "prepare_assumeutxo_snapshot_run ${TMP_DATADIR} ${UTXO_PATH} ${connect_address} ${chain}" \ + --conclude "conclude_assumeutxo_snapshot_run {commit}" \ + --cleanup "cleanup_assumeutxo_snapshot_run ${TMP_DATADIR}" \ + --runs 1 \ + --show-output \ + --export-json "${results_file}" \ + --command-name "base (${base_commit})" \ + --command-name "head 
(${head_commit})" \ + "perf script flamegraph build/src/bitcoind -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height}" \ + -L commit "${base_commit},${head_commit}" +} + +# Main execution +if [ "$#" -ne 8 ]; then + echo "Usage: $0 base_commit head_commit TMP_DATADIR UTXO_PATH results_dir chain stop_at_height connect_address" + exit 1 +fi + +run_benchmark "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" diff --git a/justfile b/justfile index 8eae95d1401a..caa9879347e2 100644 --- a/justfile +++ b/justfile @@ -18,8 +18,8 @@ build-dev *args: clean cmake --build build -j {{ num_cpus() }} # Build for the CI, including bench_bitcoin -[private] [group('ci')] +[private] build-ci: clean cmake -B build -DBUILD_BENCH=ON -DCMAKE_BUILD_TYPE=RelWithDebInfo -DAPPEND_CPPFLAGS="-fno-omit-frame-pointer" cmake --build build -j {{ num_cpus() }} @@ -72,7 +72,12 @@ lint: export COMMIT_RANGE="$( git rev-list --max-count=1 --merges HEAD )..HEAD" RUST_BACKTRACE=1 cargo run -# Run assumeutxo CI workflow +# Run signet assumeutxo CI workflow [group('ci')] run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file: - ./bench-ci/run-assumeutxo-signet-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} signet 170000 "148.251.128.115:55555" + +# Run mainnet assumeutxo CI workflow +[group('ci')] +run-assumeutxo-mainnet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} main 850000 "148.251.128.115:33333" From 0ceefb0062d5829efc19b0c8caa8f08d52e24aa1 Mon Sep 17 00:00:00 2001 From: Andrew Toth Date: Tue, 3 Dec 2024 14:24:11 -0500 Subject: [PATCH 06/44] Show mainnet results first --- .github/workflows/publish-results.yml | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index 8b4b3577a2c9..afeaba7d21ce 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -12,7 +12,7 @@ jobs: contents: write checks: read env: - NETWORKS: "signet,mainnet" + NETWORKS: "mainnet,signet" steps: - uses: actions/checkout@v4 with: From e29d2412eea7c5db366b7a5e6a2ebccb76879b34 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Wed, 18 Dec 2024 20:47:23 +0000 Subject: [PATCH 07/44] isolate benchmark and perf to cores --- bench-ci/run-assumeutxo-bench.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 5f6dd2a42229..7a2245c7c3dc 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -120,7 +120,7 @@ run_benchmark() { --export-json "${results_file}" \ --command-name "base (${base_commit})" \ --command-name "head (${head_commit})" \ - "perf script flamegraph build/src/bitcoind -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height}" \ + "taskset -c 1 perf script flamegraph taskset -c 2-15 build/src/bitcoind -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height}" \ -L commit "${base_commit},${head_commit}" } From 68e47f9f11c30268ce72d1312dfccc71b7fd524e Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Sun, 29 Dec 2024 02:41:51 +0000 Subject: [PATCH 08/44] use nix 24.11 in shell.nix --- shell.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/shell.nix b/shell.nix index 6a75257493ef..8c0a590bfdbf 100644 --- a/shell.nix +++ b/shell.nix @@ -1,5 +1,5 @@ # Copyright 0xB10C, willcl-ark -{ pkgs ? import (fetchTarball "https://github.com/nixos/nixpkgs/archive/nixos-unstable.tar.gz") {}, +{ pkgs ? 
import (fetchTarball "https://github.com/nixos/nixpkgs/archive/nixos-24.11.tar.gz") {}, spareCores ? 0, withClang ? false, withDebug ? false, From 92df3f79bc9236d98563f5b214e6ded977176492 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Sun, 29 Dec 2024 03:12:42 +0000 Subject: [PATCH 09/44] add util-linux for taskset --- shell.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/shell.nix b/shell.nix index 8c0a590bfdbf..2bd9dc24c0e8 100644 --- a/shell.nix +++ b/shell.nix @@ -110,6 +110,7 @@ in pkgs.mkShell { jq linuxKernel.packages.linux_6_6.perf perf-tools + util-linux ]; # Modifies the Nix clang++ wrapper to avoid warning: From 0c362c85c2b1f9a7bf9fdb469f44106259f81674 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Mon, 30 Dec 2024 20:46:09 -0600 Subject: [PATCH 10/44] remove unneeded install nix action --- .github/workflows/ci.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 17d96b294337..bd4f97242f1b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,9 +33,6 @@ jobs: # Fetch down to the base SHA for the base benchmark comparison as we # only checked out to depth 1 git fetch --depth=1 origin ${{ github.event.pull_request.base.sha }} - - uses: cachix/install-nix-action@v27 - with: - nix_path: $NIX_PATH - name: Run AssumeUTXO ${{ matrix.network }} env: TMP_DATADIR: "${{ runner.temp }}/base_datadir" From d158c68b105e1f684733a17c8263ffdc0bfc3cff Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Mon, 30 Dec 2024 20:33:12 -0600 Subject: [PATCH 11/44] use ccache with persistent dir --- .github/workflows/ci.yml | 10 +++++++++- bench-ci/run-assumeutxo-bench.sh | 12 ++++++++++-- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bd4f97242f1b..f426e04e1c98 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,13 +33,21 @@ jobs: # Fetch down to the base SHA for the base benchmark comparison as we 
# only checked out to depth 1 git fetch --depth=1 origin ${{ github.event.pull_request.base.sha }} + - name: Setup ccache + run: | + mkdir -p /data/ccache + export CCACHE_DIR=/data/ccache + export CCACHE_MAXSIZE=50G + ccache -M 50G + ccache -s - name: Run AssumeUTXO ${{ matrix.network }} env: TMP_DATADIR: "${{ runner.temp }}/base_datadir" + CCACHE_DIR: /data/ccache run: | env mkdir -p "$TMP_DATADIR" - nix-shell --command "just run-assumeutxo-${{ matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json" + nix-shell --command "CMAKE_C_COMPILER_LAUNCHER=ccache CMAKE_CXX_COMPILER_LAUNCHER=ccache just run-assumeutxo-${{ matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json" - uses: actions/upload-artifact@v4 with: name: result-${{ matrix.network }} diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 7a2245c7c3dc..43567131b7c0 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -47,9 +47,17 @@ setup_assumeutxo_snapshot_run() { local TMP_DATADIR="$2" git checkout "${commit}" - # Build for CI without bench_bitcoin - cmake -B build -DBUILD_BENCH=OFF -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" + ccache -z + ccache -s + cmake -B build \ + -DBUILD_BENCH=OFF \ + -DBUILD_TESTS=OFF \ + -DCMAKE_BUILD_TYPE=RelWithDebInfo \ + -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" \ + -DCMAKE_C_COMPILER_LAUNCHER=ccache \ + -DCMAKE_CXX_COMPILER_LAUNCHER=ccache cmake --build build -j "$(nproc)" + ccache -s clean_datadir "${TMP_DATADIR}" } From 05e31826e2462a2a998ae964ff64411fb0b332f0 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Mon, 6 Jan 2025 11:54:55 +0000 Subject: [PATCH 12/44] use isolated cores for build --- bench-ci/run-assumeutxo-bench.sh | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 
43567131b7c0..d7f5571f1046 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -49,14 +49,15 @@ setup_assumeutxo_snapshot_run() { git checkout "${commit}" ccache -z ccache -s - cmake -B build \ + # Use all cores (0-15) for the build phase + taskset -c 0-15 cmake -B build \ -DBUILD_BENCH=OFF \ -DBUILD_TESTS=OFF \ -DCMAKE_BUILD_TYPE=RelWithDebInfo \ -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" \ -DCMAKE_C_COMPILER_LAUNCHER=ccache \ -DCMAKE_CXX_COMPILER_LAUNCHER=ccache - cmake --build build -j "$(nproc)" + taskset -c 0-15 cmake --build build -j "$(nproc)" ccache -s clean_datadir "${TMP_DATADIR}" } @@ -72,8 +73,8 @@ prepare_assumeutxo_snapshot_run() { # Run the actual preparation steps clean_datadir "${TMP_DATADIR}" - build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${chain}" -stopatheight=1 - build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${chain}" -dbcache=16000 -pausebackgroundsync=1 -loadutxosnapshot="${UTXO_PATH}" || true + taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${chain}" -stopatheight=1 + taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${chain}" -dbcache=16000 -pausebackgroundsync=1 -loadutxosnapshot="${UTXO_PATH}" || true clean_logs "${TMP_DATADIR}" } From 89063f9042780f993b397816375592d13e0b9e5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Sun, 22 Dec 2024 12:57:27 +0100 Subject: [PATCH 13/44] signet 170000 & main 860000 --- justfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/justfile b/justfile index caa9879347e2..3ae8d83fcb29 100644 --- a/justfile +++ b/justfile @@ -75,9 +75,9 @@ lint: # Run signet assumeutxo CI workflow [group('ci')] run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file: - ./bench-ci/run-assumeutxo-bench.sh {{ 
base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} signet 170000 "148.251.128.115:55555" + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} signet 200000 "148.251.128.115:55555" # Run mainnet assumeutxo CI workflow [group('ci')] run-assumeutxo-mainnet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} main 850000 "148.251.128.115:33333" + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} main 860000 "148.251.128.115:33333" From 731d86641c3e6716fee633863335f40b03257eee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Sun, 22 Dec 2024 13:03:19 +0100 Subject: [PATCH 14/44] add speedups to PR comment --- .github/workflows/publish-results.yml | 60 ++++++++++++++++----------- 1 file changed, 36 insertions(+), 24 deletions(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index afeaba7d21ce..7e5f96ada1b7 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -13,6 +13,9 @@ jobs: checks: read env: NETWORKS: "mainnet,signet" + outputs: + speedups: ${{ steps.organize.outputs.speedups }} + pr-number: ${{ steps.organize.outputs.pr-number }} steps: - uses: actions/checkout@v4 with: @@ -79,19 +82,32 @@ jobs: // Process each network's results const combinedResults = { - results: [] + results: [], + speedups: {} }; for (const network of networks) { if (fs.existsSync(`${network}-results`)) { const networkResults = JSON.parse(fs.readFileSync(`${network}-results/results.json`, 'utf8')); + let baseMean, headMean; - // Add network name to each result + // Add network name to each result and collect means networkResults.results.forEach(result => { result.network = network; 
combinedResults.results.push(result); + if (result.command.includes('base')) { + baseMean = result.mean; + } else if (result.command.includes('head')) { + headMean = result.mean; + } }); + // Calculate speedup if we have both measurements + if (baseMean && headMean) { + const speedup = ((baseMean - headMean) / baseMean * 100).toFixed(1); + combinedResults.speedups[network] = speedup; + } + // Move flamegraphs if (fs.existsSync(`${network}-flamegraph`)) { fs.readdirSync(`${network}-flamegraph`).forEach(file => { @@ -120,7 +136,12 @@ jobs:

PR #${prNumber} - Run ${runId}

${networks.map(network => `
-

${network} Results

+

+ ${network} Results + ${combinedResults.speedups[network] ? + `(${combinedResults.speedups[network]}% speedup)` + : ''} +

${combinedResults.results .filter(result => result.network === network) @@ -207,10 +228,16 @@ jobs: fs.writeFileSync('index.html', mainIndexHtml); - // Return the URL for the PR comment + // Set outputs for use in PR comment const resultUrl = `https://${context.repo.owner}.github.io/${context.repo.name}/results/pr-${prNumber}/${runId}/index.html`; + const speedupString = Object.entries(combinedResults.speedups) + .map(([network, speedup]) => `${network}: ${speedup}%`) + .join(', '); + core.setOutput('result-url', resultUrl); - return resultUrl; + core.setOutput('speedups', speedupString); + core.setOutput('pr-number', prNumber); + return { url: resultUrl, speedups: speedupString }; - name: Upload Pages artifact uses: actions/upload-pages-artifact@v3 with: @@ -230,27 +257,12 @@ jobs: pull-requests: write actions: read steps: - - name: Download metadata artifact - uses: actions/download-artifact@v4 - with: - pattern: run-metadata-* - github-token: ${{ secrets.GITHUB_TOKEN }} - run-id: ${{ github.event.workflow_run.id }} - path: metadata - - name: Parse Pull Request Number - id: parse-pr - run: | - # Find the first github.json file in any of the metadata subdirectories - metadata_file=$(find metadata -name github.json | head -n1) - if [ -n "$metadata_file" ]; then - pr_number=$(jq -r '.event.pull_request.number' "$metadata_file") - echo "PR_NUMBER=$pr_number" >> "$GITHUB_ENV" - fi - name: Comment on PR - if: ${{ env.PR_NUMBER }} + if: ${{ needs.build.outputs.pr-number != 'main' }} uses: thollander/actions-comment-pull-request@v3.0.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} - pr-number: ${{ env.PR_NUMBER }} + pr-number: ${{ needs.build.outputs.pr-number }} message: | - 📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${{ env.PR_NUMBER }}/${{ github.event.workflow_run.id }}/index.html after the github pages "build and 
deployment" action has completed. + 📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${{ needs.build.outputs.pr-number }}/${{ github.event.workflow_run.id }}/index.html after the github pages "build and deployment" action has completed. + 🚀 Speedups: ${{ needs.build.outputs.speedups }} \ No newline at end of file From e3a387a35655700db319879f8b307e03374cf23e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Sun, 22 Dec 2024 13:15:00 +0100 Subject: [PATCH 15/44] try using gh instead of actions --- .github/workflows/publish-results.yml | 33 +++++++++++++-------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index 7e5f96ada1b7..499fb5d8b69c 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -21,10 +21,11 @@ jobs: with: ref: gh-pages - name: Download artifacts - uses: actions/download-artifact@v4 - with: - github-token: ${{ secrets.GH_PAT }} - run-id: ${{ github.event.workflow_run.id }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh run download ${{ github.event.workflow_run.id }} --repo ${{ github.repository }} + - name: Extract artifacts run: | for network in ${NETWORKS//,/ }; do @@ -50,7 +51,7 @@ jobs: script: | const fs = require('fs'); const networks = process.env.NETWORKS.split(','); - let prNumber = 'main'; + let prNumber = 'master'; let runId; // First, extract metadata and get PR number @@ -244,10 +245,8 @@ jobs: path: results - name: Commit and push to gh-pages run: | - git config --global user.name 'github-actions[bot]' - git config --global user.email 'github-actions[bot]@users.noreply.github.com' - git add results/ - git add index.html + git config --global -c user.name='github-actions[bot]' -c 
user.email='github-actions[bot]@users.noreply.github.com' + git add results/ index.html git commit -m "Update benchmark results from run ${{ github.event.workflow_run.id }}" git push origin gh-pages comment-pr: @@ -258,11 +257,11 @@ jobs: actions: read steps: - name: Comment on PR - if: ${{ needs.build.outputs.pr-number != 'main' }} - uses: thollander/actions-comment-pull-request@v3.0.1 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - pr-number: ${{ needs.build.outputs.pr-number }} - message: | - 📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${{ needs.build.outputs.pr-number }}/${{ github.event.workflow_run.id }}/index.html after the github pages "build and deployment" action has completed. - 🚀 Speedups: ${{ needs.build.outputs.speedups }} \ No newline at end of file + if: ${{ needs.build.outputs.pr-number != 'master' }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh pr comment ${{ needs.build.outputs.pr-number }} \ + --repo ${{ github.repository }} \ + --body "📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${{ needs.build.outputs.pr-number }}/${{ github.event.workflow_run.id }}/index.html after the github pages \"build and deployment\" action has completed. 
+ 🚀 Speedups: ${{ needs.build.outputs.speedups }}" \ No newline at end of file From f2b075f7c15dd91f3c609ec22d26f9c3e622d05a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Sun, 22 Dec 2024 13:29:30 +0100 Subject: [PATCH 16/44] Widen the flames to 80% of the page width --- .github/workflows/publish-results.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index 499fb5d8b69c..e39eb0658064 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -131,7 +131,7 @@ jobs: -
+

Benchmark Results

PR #${prNumber} - Run ${runId}

@@ -207,7 +207,7 @@ jobs: -
+

Bitcoin Benchmark Results

Available Results

From 1d60a8d9a58cf8c5d1ecf130bf2e23c0a58ca699 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Sun, 22 Dec 2024 21:58:24 +0100 Subject: [PATCH 17/44] Turn off unused options during build --- bench-ci/run-assumeutxo-bench.sh | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index d7f5571f1046..aaf3058f9a3c 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -33,7 +33,7 @@ clean_logs() { local logfile="${TMP_DATADIR}/debug.log" echo "Checking for ${logfile}" - if [ -e "{$logfile}" ]; then + if [ -e "${logfile}" ]; then echo "Removing ${logfile}" rm "${logfile}" fi @@ -53,10 +53,13 @@ setup_assumeutxo_snapshot_run() { taskset -c 0-15 cmake -B build \ -DBUILD_BENCH=OFF \ -DBUILD_TESTS=OFF \ + -DBUILD_TX=OFF \ + -DBUILD_UTIL=OFF \ -DCMAKE_BUILD_TYPE=RelWithDebInfo \ + -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" \ -DCMAKE_C_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_COMPILER_LAUNCHER=ccache + -DINSTALL_MAN=OFF taskset -c 0-15 cmake --build build -j "$(nproc)" ccache -s clean_datadir "${TMP_DATADIR}" From d5e17c6de05126e1a629c2a7db544a5f4513d030 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Sun, 22 Dec 2024 21:59:00 +0100 Subject: [PATCH 18/44] add dbcache parameter, turn off console printing and add a network with large cache --- .github/workflows/ci.yml | 10 ++++++++-- .github/workflows/publish-results.yml | 2 +- bench-ci/run-assumeutxo-bench.sh | 18 ++++++++++-------- justfile | 15 ++++++++++----- 4 files changed, 29 insertions(+), 16 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f426e04e1c98..f8e099be9a87 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,9 +11,15 @@ jobs: - network: signet timeout: 20 utxo_path: /var/lib/bitcoin/utxo-signet-160000.dat - - network: mainnet + dbcache: 550 + - network: mainnet-default 
timeout: 600 utxo_path: /var/lib/bitcoin/utxo-840000.dat + dbcache: 550 + - network: mainnet-large + timeout: 600 + utxo_path: /var/lib/bitcoin/utxo-840000.dat + dbcache: 10000 runs-on: [self-hosted, linux, x64] timeout-minutes: ${{ matrix.timeout }} env: @@ -47,7 +53,7 @@ jobs: run: | env mkdir -p "$TMP_DATADIR" - nix-shell --command "CMAKE_C_COMPILER_LAUNCHER=ccache CMAKE_CXX_COMPILER_LAUNCHER=ccache just run-assumeutxo-${{ matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json" + nix-shell --command "just run-assumeutxo-${{ matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json ${{ matrix.dbcache }}" - uses: actions/upload-artifact@v4 with: name: result-${{ matrix.network }} diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index e39eb0658064..aa8ccb329098 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -12,7 +12,7 @@ jobs: contents: write checks: read env: - NETWORKS: "mainnet,signet" + NETWORKS: "mainnet-default,mainnet-large,signet" outputs: speedups: ${{ steps.organize.outputs.speedups }} pr-number: ${{ steps.organize.outputs.pr-number }} diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index aaf3058f9a3c..90866f09a294 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -72,12 +72,13 @@ prepare_assumeutxo_snapshot_run() { local TMP_DATADIR="$1" local UTXO_PATH="$2" local CONNECT_ADDRESS="$3" - local chain="$4" + local CHAIN="$4" + local DBCACHE="$5" # Run the actual preparation steps clean_datadir "${TMP_DATADIR}" - taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${chain}" -stopatheight=1 - taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${chain}" -dbcache=16000 -pausebackgroundsync=1 
-loadutxosnapshot="${UTXO_PATH}" || true + taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -stopatheight=1 -printtoconsole=0 + taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -dbcache="${DBCACHE} -pausebackgroundsync=1 -loadutxosnapshot="${UTXO_PATH}" -printtoconsole=0 || true clean_logs "${TMP_DATADIR}" } @@ -112,6 +113,7 @@ run_benchmark() { local chain="$6" local stop_at_height="$7" local connect_address="$8" + local dbcache="$9" # Export functions so they can be used by hyperfine export -f setup_assumeutxo_snapshot_run @@ -124,7 +126,7 @@ run_benchmark() { # Run hyperfine hyperfine \ --setup "setup_assumeutxo_snapshot_run {commit} ${TMP_DATADIR}" \ - --prepare "prepare_assumeutxo_snapshot_run ${TMP_DATADIR} ${UTXO_PATH} ${connect_address} ${chain}" \ + --prepare "prepare_assumeutxo_snapshot_run ${TMP_DATADIR} ${UTXO_PATH} ${connect_address} ${chain} ${dbcache}" \ --conclude "conclude_assumeutxo_snapshot_run {commit}" \ --cleanup "cleanup_assumeutxo_snapshot_run ${TMP_DATADIR}" \ --runs 1 \ @@ -132,14 +134,14 @@ run_benchmark() { --export-json "${results_file}" \ --command-name "base (${base_commit})" \ --command-name "head (${head_commit})" \ - "taskset -c 1 perf script flamegraph taskset -c 2-15 build/src/bitcoind -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height}" \ + "taskset -c 1 perf script flamegraph taskset -c 2-15 build/src/bitcoind -datadir=${TMP_DATADIR} \-connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0" \ -L commit "${base_commit},${head_commit}" } # Main execution -if [ "$#" -ne 8 ]; then - echo "Usage: $0 base_commit head_commit TMP_DATADIR UTXO_PATH results_dir chain stop_at_height connect_address" +if [ "$#" -ne 9 ]; then + echo "Usage: $0 base_commit head_commit TMP_DATADIR 
UTXO_PATH results_dir chain stop_at_height connect_address dbcache" exit 1 fi -run_benchmark "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" +run_benchmark "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" diff --git a/justfile b/justfile index 3ae8d83fcb29..be94181197f9 100644 --- a/justfile +++ b/justfile @@ -74,10 +74,15 @@ lint: # Run signet assumeutxo CI workflow [group('ci')] -run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} signet 200000 "148.251.128.115:55555" +run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} signet 200000 "148.251.128.115:55555" {{ dbcache }} -# Run mainnet assumeutxo CI workflow +# Run mainnet assumeutxo CI workflow for default cache [group('ci')] -run-assumeutxo-mainnet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} main 860000 "148.251.128.115:33333" +run-assumeutxo-mainnet-default-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} main 860000 "148.251.128.115:33333" {{ dbcache }} + +# Run mainnet assumeutxo CI workflow for large cache +[group('ci')] +run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} main 860000 "148.251.128.115:33333" {{ dbcache }} From 60c9fb469aee4f6780aae83b641d516274eb7b60 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Mon, 6 Jan 2025 17:02:09 
+0000 Subject: [PATCH 19/44] fixup publish job --- .github/workflows/publish-results.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index aa8ccb329098..908c1d4d3f02 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -139,8 +139,8 @@ jobs:

${network} Results - ${combinedResults.speedups[network] ? - `(${combinedResults.speedups[network]}% speedup)` + ${combinedResults.speedups[network] ? + `(${combinedResults.speedups[network]}% speedup)` : ''}

@@ -234,7 +234,7 @@ jobs: const speedupString = Object.entries(combinedResults.speedups) .map(([network, speedup]) => `${network}: ${speedup}%`) .join(', '); - + core.setOutput('result-url', resultUrl); core.setOutput('speedups', speedupString); core.setOutput('pr-number', prNumber); @@ -245,7 +245,8 @@ jobs: path: results - name: Commit and push to gh-pages run: | - git config --global -c user.name='github-actions[bot]' -c user.email='github-actions[bot]@users.noreply.github.com' + git config --global user.name='github-actions[bot]' + git config --global user.email='github-actions[bot]@users.noreply.github.com' git add results/ index.html git commit -m "Update benchmark results from run ${{ github.event.workflow_run.id }}" git push origin gh-pages @@ -264,4 +265,4 @@ jobs: gh pr comment ${{ needs.build.outputs.pr-number }} \ --repo ${{ github.repository }} \ --body "📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${{ needs.build.outputs.pr-number }}/${{ github.event.workflow_run.id }}/index.html after the github pages \"build and deployment\" action has completed. 
- 🚀 Speedups: ${{ needs.build.outputs.speedups }}" \ No newline at end of file + 🚀 Speedups: ${{ needs.build.outputs.speedups }}" From 38411e34fb3437f689affb9162cca5bbbb6ac8c7 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Mon, 6 Jan 2025 18:53:54 +0000 Subject: [PATCH 20/44] fixup typo in bench script --- .github/workflows/publish-results.yml | 4 ++-- bench-ci/run-assumeutxo-bench.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index 908c1d4d3f02..43d55e8aa121 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -245,8 +245,8 @@ jobs: path: results - name: Commit and push to gh-pages run: | - git config --global user.name='github-actions[bot]' - git config --global user.email='github-actions[bot]@users.noreply.github.com' + git config --global user.name "github-actions[bot]" + git config --global user.email "github-actions[bot]@users.noreply.github.com" git add results/ index.html git commit -m "Update benchmark results from run ${{ github.event.workflow_run.id }}" git push origin gh-pages diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 90866f09a294..832b17d794b6 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -78,7 +78,7 @@ prepare_assumeutxo_snapshot_run() { # Run the actual preparation steps clean_datadir "${TMP_DATADIR}" taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -stopatheight=1 -printtoconsole=0 - taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -dbcache="${DBCACHE} -pausebackgroundsync=1 -loadutxosnapshot="${UTXO_PATH}" -printtoconsole=0 || true + taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -dbcache="${DBCACHE}" 
-pausebackgroundsync=1 -loadutxosnapshot="${UTXO_PATH}" -printtoconsole=0 || true clean_logs "${TMP_DATADIR}" } From 95b388bcd81197e800b70c43e26d2974d29ea916 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Thu, 2 Jan 2025 14:08:59 +0100 Subject: [PATCH 21/44] Add parser & plotter for time, block height & cache size # Conflicts: # .github/workflows/ci.yml # bench-ci/run-assumeutxo-bench.sh # justfile --- .github/workflows/ci.yml | 6 ++- .github/workflows/publish-results.yml | 26 +++++++++++ bench-ci/parse_and_plot.py | 66 +++++++++++++++++++++++++++ bench-ci/run-assumeutxo-bench.sh | 41 ++++++++++++----- justfile | 12 ++--- pyproject.toml | 4 +- requirements.txt | 4 ++ shell.nix | 3 ++ 8 files changed, 143 insertions(+), 19 deletions(-) create mode 100755 bench-ci/parse_and_plot.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f8e099be9a87..66c986352d15 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -53,11 +53,15 @@ jobs: run: | env mkdir -p "$TMP_DATADIR" - nix-shell --command "just run-assumeutxo-${{ matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json ${{ matrix.dbcache }}" + nix-shell --command "just run-assumeutxo-${{ matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json ${{ matrix.dbcache }} ${{ runner.temp }}/pngs" - uses: actions/upload-artifact@v4 with: name: result-${{ matrix.network }} path: "${{ runner.temp }}/results.json" + - uses: actions/upload-artifact@v4 + with: + name: pngs-${{ matrix.network }} + path: "${{ runner.temp }}/pngs/*.png" - uses: actions/upload-artifact@v4 with: name: flamegraph-${{ matrix.network }} diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index 43d55e8aa121..ced2c75068fd 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -43,6 +43,11 @@ jobs: mkdir -p "${network}-metadata" mv 
"run-metadata-${network}"/* "${network}-metadata/" fi + + if [ -d "pngs-${network}" ]; then + mkdir -p "${network}-plots" + mv "pngs-${network}"/*.png "${network}-plots/" + fi done - name: Organize results id: organize @@ -117,6 +122,17 @@ jobs: fs.copyFileSync(sourceFile, targetFile); }); } + + // Move plots + if (fs.existsSync(plotsDir)) { + const targetPlotsDir = `${resultDir}/${network}-plots`; + fs.mkdirSync(targetPlotsDir, { recursive: true }); + fs.readdirSync(plotsDir).forEach(plot => { + const sourcePlot = `${plotsDir}/${plot}`; + const targetPlot = `${targetPlotsDir}/${plot}`; + fs.copyFileSync(sourcePlot, targetPlot); + }); + } } } @@ -180,6 +196,16 @@ jobs: ${fs.existsSync(flameGraphPath) ? ` ` : ''} + ${fs.existsSync("${network}-plots/cache_vs_height.png") ? ` +

+ <details>
+ <summary>Additional Plots</summary>
+ <img src="${network}-plots/height_vs_time.png" alt="Height vs Time">
+ <img src="${network}-plots/cache_vs_height.png" alt="Cache vs Height">
+ <img src="${network}-plots/cache_vs_time.png" alt="Cache vs Time">
+ <img src="${network}-plots/tx_vs_height.png" alt="Tx vs Height">
+ <img src="${network}-plots/coins_cache_vs_height.png" alt="Coins cache vs Height">
+ </details>
+ ` : ''} `; }).join('')}
diff --git a/bench-ci/parse_and_plot.py b/bench-ci/parse_and_plot.py new file mode 100755 index 000000000000..28315605f240 --- /dev/null +++ b/bench-ci/parse_and_plot.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +import sys +import os +import re +import datetime +import matplotlib.pyplot as plt + + +def parse_line(line): + match = re.match( + r'^([\d\-:TZ]+) UpdateTip: new best.+height=(\d+).+tx=(\d+).+cache=([\d.]+)MiB\((\d+)txo\)', + line + ) + if not match: + return None + iso_str, height_str, tx_str, cache_size_mb_str, cache_coins_count_str = match.groups() + parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ") + return parsed_datetime, int(height_str), int(tx_str), float(cache_size_mb_str), int(cache_coins_count_str) + + +def parse_log_file(log_file): + with open(log_file, 'r', encoding='utf-8') as f: + data = [result for line in f if (result := parse_line(line))] + if not data: + print("No UpdateTip entries found.") + sys.exit(0) + assert all(data[i][0] <= data[i + 1][0] for i in range(len(data) - 1)), "Entries are not sorted by time" + return data + + +def generate_plot(x, y, x_label, y_label, title, output_file): + plt.figure(figsize=(20, 10)) + plt.plot(x, y) + plt.title(title) + plt.xlabel(x_label) + plt.ylabel(y_label) + plt.grid(True) + plt.xticks(rotation=90) + plt.tight_layout() + plt.savefig(output_file) + plt.close() + + +if __name__ == "__main__": + if len(sys.argv) != 3: + print(f"Usage: {sys.argv[0]} ") + sys.exit(1) + + log_file = sys.argv[1] + if not os.path.isfile(log_file): + print(f"File not found: {log_file}") + sys.exit(1) + + png_dir = sys.argv[2] + os.makedirs(png_dir, exist_ok=True) + + times, heights, tx_counts, cache_size, cache_count = zip(*parse_log_file(log_file)) + float_minutes = [(t - times[0]).total_seconds() / 60 for t in times] + + generate_plot(float_minutes, heights, "Elapsed minutes", "Block Height", "Block Height vs Time", os.path.join(png_dir, "height_vs_time.png")) + generate_plot(heights, 
cache_size, "Block Height", "Cache Size (MiB)", "Cache Size vs Block Height", os.path.join(png_dir, "cache_vs_height.png")) + generate_plot(float_minutes, cache_size, "Elapsed minutes", "Cache Size (MiB)", "Cache Size vs Time", os.path.join(png_dir, "cache_vs_time.png")) + generate_plot(heights, tx_counts, "Block Height", "Transaction Count", "Transactions vs Block Height", os.path.join(png_dir, "tx_vs_height.png")) + generate_plot(heights, cache_count, "Block Height", "Coins Cache Size", "Coins Cache Size vs Block Height", os.path.join(png_dir, "coins_cache_vs_height.png")) + + print("Plots saved!") diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 832b17d794b6..555709a11403 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -55,11 +55,11 @@ setup_assumeutxo_snapshot_run() { -DBUILD_TESTS=OFF \ -DBUILD_TX=OFF \ -DBUILD_UTIL=OFF \ + -DINSTALL_MAN=OFF \ -DCMAKE_BUILD_TYPE=RelWithDebInfo \ - -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" \ -DCMAKE_C_COMPILER_LAUNCHER=ccache \ - -DINSTALL_MAN=OFF + -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ + -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" taskset -c 0-15 cmake --build build -j "$(nproc)" ccache -s clean_datadir "${TMP_DATADIR}" @@ -87,7 +87,25 @@ conclude_assumeutxo_snapshot_run() { set -euxo pipefail local commit="$1" + local TMP_DATADIR="$2" + local PNG_DIR="$3" + + # Search in subdirs e.g. 
$datadir/signet + debug_log=$(find "${TMP_DATADIR}" -name debug.log -print -quit) + if [ -n "${debug_log}" ]; then + echo "Generating plots from ${debug_log}" + if [ -x "bench-ci/parse_and_plot.py" ]; then + bench-ci/parse_and_plot.py "${debug_log}" "${PNG_DIR}" + else + ls -al "bench-ci/" + echo "parse_and_plot.py not found or not executable, skipping plot generation" + fi + else + ls -al "${TMP_DATADIR}/" + echo "debug.log not found, skipping plot generation" + fi + # Move flamegraph if exists if [ -e flamegraph.html ]; then mv flamegraph.html "${commit}"-flamegraph.html fi @@ -110,10 +128,11 @@ run_benchmark() { local TMP_DATADIR="$3" local UTXO_PATH="$4" local results_file="$5" - local chain="$6" - local stop_at_height="$7" - local connect_address="$8" - local dbcache="$9" + local png_dir="$6" + local chain="$7" + local stop_at_height="$8" + local connect_address="$9" + local dbcache="${10}" # Export functions so they can be used by hyperfine export -f setup_assumeutxo_snapshot_run @@ -127,7 +146,7 @@ run_benchmark() { hyperfine \ --setup "setup_assumeutxo_snapshot_run {commit} ${TMP_DATADIR}" \ --prepare "prepare_assumeutxo_snapshot_run ${TMP_DATADIR} ${UTXO_PATH} ${connect_address} ${chain} ${dbcache}" \ - --conclude "conclude_assumeutxo_snapshot_run {commit}" \ + --conclude "conclude_assumeutxo_snapshot_run {commit} ${TMP_DATADIR} ${png_dir}" \ --cleanup "cleanup_assumeutxo_snapshot_run ${TMP_DATADIR}" \ --runs 1 \ --show-output \ @@ -139,9 +158,9 @@ run_benchmark() { } # Main execution -if [ "$#" -ne 9 ]; then - echo "Usage: $0 base_commit head_commit TMP_DATADIR UTXO_PATH results_dir chain stop_at_height connect_address dbcache" +if [ "$#" -ne 10 ]; then + echo "Usage: $0 base_commit head_commit TMP_DATADIR UTXO_PATH results_dir png_dir chain stop_at_height connect_address dbcache" exit 1 fi -run_benchmark "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" +run_benchmark "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" "${10}" diff --git a/justfile b/justfile index 
be94181197f9..745dffab967f 100644 --- a/justfile +++ b/justfile @@ -74,15 +74,15 @@ lint: # Run signet assumeutxo CI workflow [group('ci')] -run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} signet 200000 "148.251.128.115:55555" {{ dbcache }} +run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 200000 "148.251.128.115:55555" {{ dbcache }} # Run mainnet assumeutxo CI workflow for default cache [group('ci')] -run-assumeutxo-mainnet-default-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} main 860000 "148.251.128.115:33333" {{ dbcache }} +run-assumeutxo-mainnet-default-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 850000 "148.251.128.115:33333" {{ dbcache }} # Run mainnet assumeutxo CI workflow for large cache [group('ci')] -run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} main 860000 "148.251.128.115:33333" {{ dbcache }} +run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 850000 "148.251.128.115:33333" {{ dbcache }} 
diff --git a/pyproject.toml b/pyproject.toml index 51bfa566a5c5..26605fc84930 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,5 +9,7 @@ dependencies = [ # Removing in favour of packaged nixpkgs bin which is not dynamically linked # "ruff==0.5.5", "vulture==2.6", - "pyperf" + "pyperf==2.8.0", + "matplotlib==3.8.0", + "numpy==1.26.0" ] diff --git a/requirements.txt b/requirements.txt index f972af642ae9..c9b220b6fe46 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,10 +4,14 @@ codespell==2.2.6 # via bitcoin-core-deps (pyproject.toml) lief==0.13.2 # via bitcoin-core-deps (pyproject.toml) +matplotlib==3.8.0 + # via bitcoin-core-deps (pyproject.toml) mypy==1.4.1 # via bitcoin-core-deps (pyproject.toml) mypy-extensions==1.0.0 # via mypy +numpy==1.26.0 + # via bitcoin-core-deps (pyproject.toml) psutil==6.1.0 # via pyperf pyperf==2.8.0 diff --git a/shell.nix b/shell.nix index 2bd9dc24c0e8..8e60b997d709 100644 --- a/shell.nix +++ b/shell.nix @@ -122,6 +122,9 @@ in pkgs.mkShell { echo "" echo "Setting up python venv" + # fixes libstdc++ issues and libgl.so issues + export LD_LIBRARY_PATH=${stdenv.cc.cc.lib}/lib/:$LD_LIBRARY_PATH + uv venv --python 3.10 source .venv/bin/activate uv pip install -r pyproject.toml From 26071daedc6734b5de033cd5dc5f069996cac794 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Mon, 6 Jan 2025 22:55:43 +0000 Subject: [PATCH 22/44] streamline ci --- .github/workflows/ci.yml | 61 +++++++++++++++----- bench-ci/run-assumeutxo-bench.sh | 44 ++++++--------- justfile | 95 +++++++++----------------------- 3 files changed, 89 insertions(+), 111 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 66c986352d15..f540c390f449 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,7 +4,42 @@ on: branches: - master jobs: + build-binaries: + runs-on: [self-hosted, linux, x64] + env: + NIX_PATH: nixpkgs=channel:nixos-unstable + BASE_SHA: ${{ github.event.pull_request.base.sha }} + steps: + - name: 
Checkout repo + uses: actions/checkout@v4 + with: + fetch-depth: 1 + - name: Fetch base commit + run: | + echo "CHECKOUT_COMMIT=$(git rev-parse HEAD)" >> "$GITHUB_ENV" + git fetch --depth=1 origin ${{ github.event.pull_request.base.sha }} + - name: Setup ccache + run: | + mkdir -p /data/ccache + export CCACHE_DIR=/data/ccache + export CCACHE_MAXSIZE=50G + ccache -M 50G + ccache -s + - name: Build both binaries + env: + CCACHE_DIR: /data/ccache + run: | + mkdir -p ${{ runner.temp }}/binaries + nix-shell --command "just build-assumeutxo-binaries $BASE_SHA $CHECKOUT_COMMIT" + cp build-base/src/bitcoind ${{ runner.temp }}/binaries/bitcoind-base + cp build-head/src/bitcoind ${{ runner.temp }}/binaries/bitcoind-head + - name: Upload binaries + uses: actions/upload-artifact@v4 + with: + name: bitcoind-binaries + path: ${{ runner.temp }}/binaries/ assumeutxo: + needs: build-binaries strategy: matrix: include: @@ -31,29 +66,29 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 1 + - name: Download binaries + uses: actions/download-artifact@v4 + with: + name: bitcoind-binaries + path: ${{ runner.temp }}/binaries + - name: Set binary permissions + run: | + chmod +x ${{ runner.temp }}/binaries/bitcoind-base + chmod +x ${{ runner.temp }}/binaries/bitcoind-head - name: Fetch base commit run: | - # Export the commit hash for use in later steps echo "CHECKOUT_COMMIT=$(git rev-parse HEAD)" >> "$GITHUB_ENV" - - # Fetch down to the base SHA for the base benchmark comparison as we - # only checked out to depth 1 git fetch --depth=1 origin ${{ github.event.pull_request.base.sha }} - - name: Setup ccache - run: | - mkdir -p /data/ccache - export CCACHE_DIR=/data/ccache - export CCACHE_MAXSIZE=50G - ccache -M 50G - ccache -s - name: Run AssumeUTXO ${{ matrix.network }} env: TMP_DATADIR: "${{ runner.temp }}/base_datadir" - CCACHE_DIR: /data/ccache + BINARIES_DIR: "${{ runner.temp }}/binaries" run: | env mkdir -p "$TMP_DATADIR" - nix-shell --command "just run-assumeutxo-${{ 
matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json ${{ matrix.dbcache }} ${{ runner.temp }}/pngs" + CMD="nix-shell --command \"just run-assumeutxo-${{ matrix.network }}-ci $BASE_SHA $CHECKOUT_COMMIT $TMP_DATADIR $UTXO_PATH ${{ runner.temp }}/results.json ${{ matrix.dbcache }} ${{ runner.temp }}/pngs $BINARIES_DIR\"" + echo "Running command: $CMD" + eval "$CMD" - uses: actions/upload-artifact@v4 with: name: result-${{ matrix.network }} diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 555709a11403..fb60976ee05a 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -43,25 +43,8 @@ clean_logs() { setup_assumeutxo_snapshot_run() { set -euxo pipefail - local commit="$1" - local TMP_DATADIR="$2" - - git checkout "${commit}" - ccache -z - ccache -s - # Use all cores (0-15) for the build phase - taskset -c 0-15 cmake -B build \ - -DBUILD_BENCH=OFF \ - -DBUILD_TESTS=OFF \ - -DBUILD_TX=OFF \ - -DBUILD_UTIL=OFF \ - -DINSTALL_MAN=OFF \ - -DCMAKE_BUILD_TYPE=RelWithDebInfo \ - -DCMAKE_C_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" - taskset -c 0-15 cmake --build build -j "$(nproc)" - ccache -s + local TMP_DATADIR="$1" + local commit="$2" clean_datadir "${TMP_DATADIR}" } @@ -74,11 +57,14 @@ prepare_assumeutxo_snapshot_run() { local CONNECT_ADDRESS="$3" local CHAIN="$4" local DBCACHE="$5" + local commit="$6" + local BINARIES_DIR="$7" # Run the actual preparation steps clean_datadir "${TMP_DATADIR}" - taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -stopatheight=1 -printtoconsole=0 - taskset -c 0-15 build/src/bitcoind -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -dbcache="${DBCACHE}" -pausebackgroundsync=1 -loadutxosnapshot="${UTXO_PATH}" -printtoconsole=0 || true + # Use the 
pre-built binaries from BINARIES_DIR + taskset -c 0-15 "${BINARIES_DIR}/bitcoind-${commit}" -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -stopatheight=1 -printtoconsole=0 + taskset -c 0-15 "${BINARIES_DIR}/bitcoind-${commit}" -datadir="${TMP_DATADIR}" -connect="${CONNECT_ADDRESS}" -daemon=0 -chain="${CHAIN}" -dbcache="${DBCACHE}" -pausebackgroundsync=1 -loadutxosnapshot="${UTXO_PATH}" -printtoconsole=0 || true clean_logs "${TMP_DATADIR}" } @@ -133,6 +119,7 @@ run_benchmark() { local stop_at_height="$8" local connect_address="$9" local dbcache="${10}" + local BINARIES_DIR="${11}" # Export functions so they can be used by hyperfine export -f setup_assumeutxo_snapshot_run @@ -144,8 +131,9 @@ run_benchmark() { # Run hyperfine hyperfine \ - --setup "setup_assumeutxo_snapshot_run {commit} ${TMP_DATADIR}" \ - --prepare "prepare_assumeutxo_snapshot_run ${TMP_DATADIR} ${UTXO_PATH} ${connect_address} ${chain} ${dbcache}" \ + --shell=bash \ + --setup "setup_assumeutxo_snapshot_run ${TMP_DATADIR} {commit}" \ + --prepare "prepare_assumeutxo_snapshot_run ${TMP_DATADIR} ${UTXO_PATH} ${connect_address} ${chain} ${dbcache} {commit} ${BINARIES_DIR}" \ --conclude "conclude_assumeutxo_snapshot_run {commit} ${TMP_DATADIR} ${png_dir}" \ --cleanup "cleanup_assumeutxo_snapshot_run ${TMP_DATADIR}" \ --runs 1 \ @@ -153,14 +141,14 @@ run_benchmark() { --export-json "${results_file}" \ --command-name "base (${base_commit})" \ --command-name "head (${head_commit})" \ - "taskset -c 1 perf script flamegraph taskset -c 2-15 build/src/bitcoind -datadir=${TMP_DATADIR} \-connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0" \ - -L commit "${base_commit},${head_commit}" + "taskset -c 1 perf script flamegraph taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} 
-printtoconsole=0" \ + -L commit "base,head" } # Main execution -if [ "$#" -ne 10 ]; then - echo "Usage: $0 base_commit head_commit TMP_DATADIR UTXO_PATH results_dir png_dir chain stop_at_height connect_address dbcache" +if [ "$#" -ne 11 ]; then + echo "Usage: $0 base_commit head_commit TMP_DATADIR UTXO_PATH results_dir png_dir chain stop_at_height connect_address dbcache BINARIES_DIR" exit 1 fi -run_benchmark "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" "${10}" +run_benchmark "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" "${10}" "${11}" diff --git a/justfile b/justfile index 745dffab967f..378b1ab8f5d3 100644 --- a/justfile +++ b/justfile @@ -5,84 +5,39 @@ os := os() default: just --list -# Build default project -[group('build')] -build *args: clean - cmake -B build {{ args }} - cmake --build build -j {{ num_cpus() }} - -# Build with all optional modules -[group('build')] -build-dev *args: clean - cmake -B build --preset dev-mode {{ args }} - cmake --build build -j {{ num_cpus() }} - -# Build for the CI, including bench_bitcoin +# Build base and head binaries for CI [group('ci')] -[private] -build-ci: clean - cmake -B build -DBUILD_BENCH=ON -DCMAKE_BUILD_TYPE=RelWithDebInfo -DAPPEND_CPPFLAGS="-fno-omit-frame-pointer" - cmake --build build -j {{ num_cpus() }} - -# Re-build current config -[group('build')] -rebuild: - cmake --build build -j {{ num_cpus() }} - -# Clean build dir using git clean -dfx -[group('build')] -clean: - git clean -dfx - -# Run unit tests -[group('test')] -test-unit: - ctest --test-dir build -j {{ num_cpus() }} - -# Run all functional tests -[group('test')] -test-func: - build/test/functional/test_runner.py -j {{ num_cpus() }} - -# Run all unit and functional tests -[group('test')] -test: test-unit test-func - -# Run a single functional test (filename.py) -[group('test')] -test-func1 test: - build/test/functional/test_runner.py {{ test }} - -# Run a single unit test suite -[group('test')] -test-unit1 suite: - build/src/test/test_bitcoin 
--log_level=all --run_test={{ suite }} - -# Run benchmarks -[group('perf')] -bench: - build/src/bench/bench_bitcoin - -# Run the lint job -lint: +build-assumeutxo-binaries base_commit head_commit: #!/usr/bin/env bash - cd test/lint/test_runner/ - cargo fmt - cargo clippy - export COMMIT_RANGE="$( git rev-list --max-count=1 --merges HEAD )..HEAD" - RUST_BACKTRACE=1 cargo run + set -euxo pipefail + for build in "base:{{ base_commit }}" "head:{{ head_commit }}"; do + name="${build%%:*}" + commit="${build#*:}" + git checkout "$commit" + taskset -c 0-15 cmake -B "build-$name" \ + -DBUILD_BENCH=OFF \ + -DBUILD_TESTS=OFF \ + -DBUILD_TX=OFF \ + -DBUILD_UTIL=OFF \ + -DINSTALL_MAN=OFF \ + -DCMAKE_BUILD_TYPE=RelWithDebInfo \ + -DCMAKE_C_COMPILER_LAUNCHER=ccache \ + -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ + -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" + taskset -c 0-15 cmake --build "build-$name" -j {{ num_cpus() }} + done # Run signet assumeutxo CI workflow [group('ci')] -run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 200000 "148.251.128.115:55555" {{ dbcache }} +run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 200000 "148.251.128.115:55555" {{ dbcache }} {{ binaries_dir }} # Run mainnet assumeutxo CI workflow for default cache [group('ci')] -run-assumeutxo-mainnet-default-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 850000 "148.251.128.115:33333" {{ dbcache }} +run-assumeutxo-mainnet-default-ci 
base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 850000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} # Run mainnet assumeutxo CI workflow for large cache [group('ci')] -run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 850000 "148.251.128.115:33333" {{ dbcache }} +run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 850000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} From 85774fe88f6690b2bdac81abc7f8c7b6e864979a Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Tue, 7 Jan 2025 07:50:10 +0000 Subject: [PATCH 23/44] streamline shell.nix --- shell.nix | 161 +++++++++++++++--------------------------------------- 1 file changed, 45 insertions(+), 116 deletions(-) diff --git a/shell.nix b/shell.nix index 8e60b997d709..4bc5e29061f3 100644 --- a/shell.nix +++ b/shell.nix @@ -1,18 +1,11 @@ # Copyright 0xB10C, willcl-ark -{ pkgs ? import (fetchTarball "https://github.com/nixos/nixpkgs/archive/nixos-24.11.tar.gz") {}, - spareCores ? 0, - withClang ? false, - withDebug ? false, -}: +{ pkgs ? 
import + (fetchTarball "https://github.com/nixos/nixpkgs/archive/nixos-24.11.tar.gz") + { }, }: let inherit (pkgs.lib) optionals strings; inherit (pkgs) stdenv; - # Add mlc binary fetching - mlcBinary = pkgs.fetchurl { - url = "https://github.com/becheran/mlc/releases/download/v0.18.0/mlc-x86_64-linux"; - sha256 = "sha256-jbdp+UlFybBE+o567L398hbcWHsG8aQGqYYf5h9JRkw="; - }; # Hyperfine # Included here because we need master for the `--conclude` flag from pr 719 hyperfine = pkgs.rustPlatform.buildRustPackage rec { @@ -38,111 +31,47 @@ let }; }; - # Create a derivation for mlc - mlc = pkgs.runCommand "mlc" {} '' - mkdir -p $out/bin - cp ${mlcBinary} $out/bin/mlc - chmod +x $out/bin/mlc - ''; - - binDirs = - [ "\$PWD/build/src" ]; - configureFlags = - [ "--with-boost-libdir=$NIX_BOOST_LIB_DIR" ] - ++ optionals withClang [ "CXX=clang++" "CC=clang" ] - ++ optionals withDebug [ "--enable-debug" ]; - jobs = - if (strings.hasSuffix "linux" builtins.currentSystem) then "$(($(nproc)-${toString spareCores}))" - else if (strings.hasSuffix "darwin" builtins.currentSystem) then "$(($(sysctl -n hw.physicalcpu)-${toString spareCores}))" - else "6"; in pkgs.mkShell { - nativeBuildInputs = with pkgs; [ - autoconf - automake - libtool - pkg-config - boost - libevent - zeromq - sqlite - clang_18 - - # tests - hexdump - - # compiler output caching per - # https://github.com/bitcoin/bitcoin/blob/master/doc/productivity.md#cache-compilations-with-ccache - ccache - - # for newer cmake building - cmake - - # depends - byacc - - # debugging - gdb - - # tracing - libsystemtap - linuxPackages.bpftrace - linuxPackages.bcc - - ]; - buildInputs = with pkgs; [ - just - bash - - # lint requirements - cargo - git - mlc - ruff - rustc - rustup - shellcheck - python310 - uv - - # Benchmarking - flamegraph - hyperfine - jq - linuxKernel.packages.linux_6_6.perf - perf-tools - util-linux - ]; - - # Modifies the Nix clang++ wrapper to avoid warning: - # "_FORTIFY_SOURCE requires compiling with 
optimization (-O)" - hardeningDisable = if withDebug then [ "all" ] else [ ]; - - shellHook = '' - echo "Bitcoin Core build nix-shell" - echo "" - echo "Setting up python venv" - - # fixes libstdc++ issues and libgl.so issues - export LD_LIBRARY_PATH=${stdenv.cc.cc.lib}/lib/:$LD_LIBRARY_PATH - - uv venv --python 3.10 - source .venv/bin/activate - uv pip install -r pyproject.toml - - BCC_EGG=${pkgs.linuxPackages.bcc}/${pkgs.python3.sitePackages}/bcc-${pkgs.linuxPackages.bcc.version}-py3.${pkgs.python3.sourceVersion.minor}.egg - - echo "adding bcc egg to PYTHONPATH: $BCC_EGG" - if [ -f $BCC_EGG ]; then - export PYTHONPATH="$PYTHONPATH:$BCC_EGG" - echo "" - else - echo "The bcc egg $BCC_EGG does not exist. Maybe the python or bcc version is different?" - fi - - echo "adding ${builtins.concatStringsSep ":" binDirs} to \$PATH to make running built binaries more natural" - export PATH=$PATH:${builtins.concatStringsSep ":" binDirs}; - - rustup default stable - rustup component add rustfmt - ''; + nativeBuildInputs = with pkgs; [ + autoconf + automake + boost + ccache + clang_18 + cmake + libevent + libtool + pkg-config + sqlite + zeromq + ]; + buildInputs = with pkgs; [ + just + bash + git + shellcheck + python310 + uv + + # Benchmarking + flamegraph + hyperfine + jq + linuxKernel.packages.linux_6_6.perf + perf-tools + util-linux + ]; + + shellHook = '' + echo "Bitcoin Core build nix-shell" + echo "" + echo "Setting up python venv" + + # fixes libstdc++ issues and libgl.so issues + export LD_LIBRARY_PATH=${stdenv.cc.cc.lib}/lib/:$LD_LIBRARY_PATH + + uv venv --python 3.10 + source .venv/bin/activate + uv pip install -r pyproject.toml + ''; } From 215582426f78cf762bdbf6240dc08f60f51f5b3a Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Tue, 7 Jan 2025 09:12:47 +0000 Subject: [PATCH 24/44] use max datadir on large run --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 
f540c390f449..c10187a17c35 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -54,7 +54,7 @@ jobs: - network: mainnet-large timeout: 600 utxo_path: /var/lib/bitcoin/utxo-840000.dat - dbcache: 10000 + dbcache: 32000 runs-on: [self-hosted, linux, x64] timeout-minutes: ${{ matrix.timeout }} env: From 76a5ae88d21b8790d2afb740e68f5272c74d6c6e Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Tue, 7 Jan 2025 09:12:56 +0000 Subject: [PATCH 25/44] add local signet command --- .gitignore | 1 + justfile | 57 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+) diff --git a/.gitignore b/.gitignore index a419c9bde745..240c4a116082 100644 --- a/.gitignore +++ b/.gitignore @@ -21,3 +21,4 @@ test/lint/test_runner/target/ /guix-build-* /ci/scratch/ +utxo-signet-160000.dat diff --git a/justfile b/justfile index 378b1ab8f5d3..3f166cfde725 100644 --- a/justfile +++ b/justfile @@ -41,3 +41,60 @@ run-assumeutxo-mainnet-default-ci base_commit head_commit TMP_DATADIR UTXO_PATH [group('ci')] run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 850000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} + +# Run a signet benchmark locally +[group('local')] +run-signet: + #!/usr/bin/env bash + set -euo pipefail + set -x + + # Get git HEAD and merge-base with master (as BASE) + HEAD=$(git rev-parse HEAD) + BASE=$(git merge-base HEAD master) + echo "Using BASE: $BASE" + echo "Using HEAD: $HEAD" + + # Make a random temp dir and save it as TMPDIR + TMPDIR=$(mktemp -d) + echo "Using temporary directory: $TMPDIR" + + # Create required directories + mkdir -p "$TMPDIR/datadir" + mkdir -p "$TMPDIR/png" + mkdir -p "$TMPDIR/binaries" + + # Build binaries + just build-assumeutxo-binaries "$BASE" "$HEAD" + cp build-head/src/bitcoind 
"$TMPDIR/binaries/bitcoind-head" + cp build-base/src/bitcoind "$TMPDIR/binaries/bitcoind-base" + + # Fetch utxo-signet-160000.dat if not exists in $CWD + if [ ! -f "./utxo-signet-160000.dat" ]; then + echo "Downloading utxo-signet-160000.dat..." + if command -v curl &> /dev/null; then + curl -L -o "./utxo-signet-160000.dat" "https://tmp.256k1.dev/utxo-signet-160000.dat" + elif command -v wget &> /dev/null; then + wget -O "./utxo-signet-160000.dat" "https://tmp.256k1.dev/utxo-signet-160000.dat" + else + echo "Error: Neither curl nor wget is available. Please install one of them." + exit 1 + fi + echo "Download complete." + else + echo "Using existing utxo-signet-160000.dat" + fi + + # Run signet CI + CI=1 just run-assumeutxo-signet-ci \ + "$BASE" \ + "$HEAD" \ + "$TMPDIR/datadir" \ + "$PWD/utxo-signet-160000.dat" \ + "$TMPDIR/result" \ + 16000 \ + "$TMPDIR/png" \ + "$TMPDIR/binaries" + + echo "Results saved in: $TMPDIR/result" + echo "PNG files saved in: $TMPDIR/png" From 1d834adf4bb131e94c39f1c47c0e3578e9381269 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Tue, 7 Jan 2025 10:14:42 +0000 Subject: [PATCH 26/44] use flamegraph-rs --- .github/workflows/ci.yml | 2 +- .github/workflows/publish-results.yml | 8 ++++---- bench-ci/run-assumeutxo-bench.sh | 2 +- shell.nix | 1 + 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c10187a17c35..f202334c0609 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -100,7 +100,7 @@ jobs: - uses: actions/upload-artifact@v4 with: name: flamegraph-${{ matrix.network }} - path: "**/*-flamegraph.html" + path: "**/*-flamegraph.svg" - name: Write GitHub and runner context files env: GITHUB_CONTEXT: ${{ toJSON(github) }} diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index ced2c75068fd..aba9de2c8524 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -43,7 +43,7 @@ 
jobs: mkdir -p "${network}-metadata" mv "run-metadata-${network}"/* "${network}-metadata/" fi - + if [ -d "pngs-${network}" ]; then mkdir -p "${network}-plots" mv "pngs-${network}"/*.png "${network}-plots/" @@ -122,7 +122,7 @@ jobs: fs.copyFileSync(sourceFile, targetFile); }); } - + // Move plots if (fs.existsSync(plotsDir)) { const targetPlotsDir = `${resultDir}/${network}-plots`; @@ -164,7 +164,7 @@ jobs: .filter(result => result.network === network) .map(result => { const commitShortId = result.parameters.commit.slice(0, 8); - const flameGraphFile = `${network}-${result.parameters.commit}-flamegraph.html`; + const flameGraphFile = `${network}-${result.parameters.commit}-flamegraph.svg`; const flameGraphPath = `${resultDir}/${flameGraphFile}`; return ` @@ -194,7 +194,7 @@ jobs: ${fs.existsSync(flameGraphPath) ? ` - + ` : ''} ${fs.existsSync("${network}-plots/cache_vs_height.png") ? `

Additional Plots

diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index fb60976ee05a..745238f946a1 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -141,7 +141,7 @@ run_benchmark() { --export-json "${results_file}" \ --command-name "base (${base_commit})" \ --command-name "head (${head_commit})" \ - "taskset -c 1 perf script flamegraph taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0" \ + "taskset -c 1 flamegraph -- taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0" \ -L commit "base,head" } diff --git a/shell.nix b/shell.nix index 4bc5e29061f3..55f46e38e32a 100644 --- a/shell.nix +++ b/shell.nix @@ -54,6 +54,7 @@ in pkgs.mkShell { uv # Benchmarking + cargo-flamegraph flamegraph hyperfine jq From f3e9c30a2a8890921268d39bacc90732346d172e Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Mon, 6 Jan 2025 21:23:58 +0000 Subject: [PATCH 27/44] perf frequency 99 # Conflicts: # bench-ci/run-assumeutxo-bench.sh --- bench-ci/run-assumeutxo-bench.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 745238f946a1..5f3f7ddad844 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -141,7 +141,7 @@ run_benchmark() { --export-json "${results_file}" \ --command-name "base (${base_commit})" \ --command-name "head (${head_commit})" \ - "taskset -c 1 flamegraph -- taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0" \ + "taskset -c 1 flamegraph -F 99 -- taskset -c 2-15 
${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0" \ -L commit "base,head" } From 56ff6416a0d4053b1decda8f10c1ec3858e6fa4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Tue, 7 Jan 2025 12:30:11 +0100 Subject: [PATCH 28/44] Don't show output in hyperfine --- bench-ci/run-assumeutxo-bench.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 5f3f7ddad844..20a8dec1a457 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -137,7 +137,6 @@ run_benchmark() { --conclude "conclude_assumeutxo_snapshot_run {commit} ${TMP_DATADIR} ${png_dir}" \ --cleanup "cleanup_assumeutxo_snapshot_run ${TMP_DATADIR}" \ --runs 1 \ - --show-output \ --export-json "${results_file}" \ --command-name "base (${base_commit})" \ --command-name "head (${head_commit})" \ From c5b1ec90d6a6df01f94b8e7f6098637d3c0ba2b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Tue, 7 Jan 2025 12:39:06 +0100 Subject: [PATCH 29/44] Disable remaining unnecessary cmake modules --- justfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/justfile b/justfile index 3f166cfde725..0e189db7b442 100644 --- a/justfile +++ b/justfile @@ -16,9 +16,12 @@ build-assumeutxo-binaries base_commit head_commit: git checkout "$commit" taskset -c 0-15 cmake -B "build-$name" \ -DBUILD_BENCH=OFF \ + -DBUILD_CLI=OFF \ -DBUILD_TESTS=OFF \ -DBUILD_TX=OFF \ -DBUILD_UTIL=OFF \ + -DENABLE_EXTERNAL_SIGNER=OFF \ + -DENABLE_WALLET=OFF \ -DINSTALL_MAN=OFF \ -DCMAKE_BUILD_TYPE=RelWithDebInfo \ -DCMAKE_C_COMPILER_LAUNCHER=ccache \ From 4db595f810ec62238e1242e68d97e4626148b03e Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Tue, 7 Jan 2025 13:28:34 +0000 Subject: [PATCH 30/44] move flamegraph properly --- bench-ci/run-assumeutxo-bench.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-) diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 20a8dec1a457..1cc7f8c68f09 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -92,8 +92,8 @@ conclude_assumeutxo_snapshot_run() { fi # Move flamegraph if exists - if [ -e flamegraph.html ]; then - mv flamegraph.html "${commit}"-flamegraph.html + if [ -e flamegraph.svg ]; then + mv flamegraph.svg "${commit}"-flamegraph.svg fi } From d83a827144fad956b75fd1f3ad99739b32021364 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Tue, 7 Jan 2025 13:17:53 +0100 Subject: [PATCH 31/44] TODO speed up feedback-loop temporarily --- justfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/justfile b/justfile index 0e189db7b442..b808a7048eaa 100644 --- a/justfile +++ b/justfile @@ -33,17 +33,17 @@ build-assumeutxo-binaries base_commit head_commit: # Run signet assumeutxo CI workflow [group('ci')] run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 200000 "148.251.128.115:55555" {{ dbcache }} {{ binaries_dir }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 161000 "148.251.128.115:55555" {{ dbcache }} {{ binaries_dir }} # Run mainnet assumeutxo CI workflow for default cache [group('ci')] run-assumeutxo-mainnet-default-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 850000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ 
UTXO_PATH }} {{ results_file }} {{ png_dir }} main 801000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} # Run mainnet assumeutxo CI workflow for large cache [group('ci')] run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 850000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 801000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} # Run a signet benchmark locally [group('local')] From 25d4d0f06c517a9d41ea2c7cf59140553c9031ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Tue, 7 Jan 2025 15:03:47 +0100 Subject: [PATCH 32/44] 801000 -> 841000 --- justfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/justfile b/justfile index b808a7048eaa..f64cd0de2629 100644 --- a/justfile +++ b/justfile @@ -38,12 +38,12 @@ run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_f # Run mainnet assumeutxo CI workflow for default cache [group('ci')] run-assumeutxo-mainnet-default-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 801000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 841000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} # Run mainnet assumeutxo CI workflow for large cache [group('ci')] run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache 
png_dir binaries_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 801000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 841000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} # Run a signet benchmark locally [group('local')] From d13b665ac8b114d29d77d82ce3fddc20977fa2ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Tue, 7 Jan 2025 13:31:24 +0100 Subject: [PATCH 33/44] Increase plot resolution & Coins Cache Size vs Time --- .github/workflows/publish-results.yml | 2 +- bench-ci/parse_and_plot.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index aba9de2c8524..d95f8fbc332f 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -203,7 +203,7 @@ jobs: Cache vs Height Cache vs Time Tx vs Height - Coins cache vs Height + Coins cache vs Time
` : ''} `; diff --git a/bench-ci/parse_and_plot.py b/bench-ci/parse_and_plot.py index 28315605f240..8fa17304c17b 100755 --- a/bench-ci/parse_and_plot.py +++ b/bench-ci/parse_and_plot.py @@ -29,7 +29,7 @@ def parse_log_file(log_file): def generate_plot(x, y, x_label, y_label, title, output_file): - plt.figure(figsize=(20, 10)) + plt.figure(figsize=(30, 10)) plt.plot(x, y) plt.title(title) plt.xlabel(x_label) @@ -61,6 +61,6 @@ def generate_plot(x, y, x_label, y_label, title, output_file): generate_plot(heights, cache_size, "Block Height", "Cache Size (MiB)", "Cache Size vs Block Height", os.path.join(png_dir, "cache_vs_height.png")) generate_plot(float_minutes, cache_size, "Elapsed minutes", "Cache Size (MiB)", "Cache Size vs Time", os.path.join(png_dir, "cache_vs_time.png")) generate_plot(heights, tx_counts, "Block Height", "Transaction Count", "Transactions vs Block Height", os.path.join(png_dir, "tx_vs_height.png")) - generate_plot(heights, cache_count, "Block Height", "Coins Cache Size", "Coins Cache Size vs Block Height", os.path.join(png_dir, "coins_cache_vs_height.png")) + generate_plot(times, cache_count, "Block Height", "Coins Cache Size", "Coins Cache Size vs Time", os.path.join(png_dir, "coins_cache_vs_time.png")) print("Plots saved!") From 73cdf70fcc02d57804fd8ee833883b8f7f8fd6d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Tue, 7 Jan 2025 14:36:45 +0100 Subject: [PATCH 34/44] Add leveldb, bench, validation and coindb debug log --- .github/workflows/publish-results.yml | 3 +- bench-ci/parse_and_plot.py | 133 ++++++++++++++++++++++++-- bench-ci/run-assumeutxo-bench.sh | 2 +- 3 files changed, 127 insertions(+), 11 deletions(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index d95f8fbc332f..888a33427cc7 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -55,6 +55,7 @@ jobs: with: script: | const fs = require('fs'); + const path = require('path'); 
const networks = process.env.NETWORKS.split(','); let prNumber = 'master'; let runId; @@ -110,7 +111,7 @@ jobs: // Calculate speedup if we have both measurements if (baseMean && headMean) { - const speedup = ((baseMean - headMean) / baseMean * 100).toFixed(1); + const speedup = baseMean > 0 ? ((baseMean - headMean) / baseMean * 100).toFixed(1) : 'N/A'; combinedResults.speedups[network] = speedup; } diff --git a/bench-ci/parse_and_plot.py b/bench-ci/parse_and_plot.py index 8fa17304c17b..f282966c47d9 100755 --- a/bench-ci/parse_and_plot.py +++ b/bench-ci/parse_and_plot.py @@ -6,7 +6,7 @@ import matplotlib.pyplot as plt -def parse_line(line): +def parse_updatetip_line(line): match = re.match( r'^([\d\-:TZ]+) UpdateTip: new best.+height=(\d+).+tx=(\d+).+cache=([\d.]+)MiB\((\d+)txo\)', line @@ -18,14 +18,93 @@ def parse_line(line): return parsed_datetime, int(height_str), int(tx_str), float(cache_size_mb_str), int(cache_coins_count_str) +def parse_leveldb_compact_line(line): + match = re.match(r'^([\d\-:TZ]+) \[leveldb] Compacting.*files', line) + if not match: + return None + iso_str = match.groups()[0] + parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ") + return parsed_datetime + + +def parse_leveldb_generated_table_line(line): + match = re.match(r'^([\d\-:TZ]+) \[leveldb] Generated table.*: (\d+) keys, (\d+) bytes', line) + if not match: + return None + iso_str, keys_count_str, bytes_count_str = match.groups() + parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ") + return parsed_datetime, int(keys_count_str), int(bytes_count_str) + + +def parse_bench_blockindex_line(line): + match = re.match(r'^([\d\-:TZ]+) block index\s+([\d.]+)ms', line) + if not match: + return None + iso_str, time_ms = match.groups() + parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ") + return parsed_datetime, float(time_ms) + + +def parse_validation_txadd_line(line): + match = re.match(r'^([\d\-:TZ]+) \[validation] 
TransactionAddedToMempool: txid=.+wtxid=.+', line) + if not match: + return None + iso_str = match.groups()[0] + parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ") + return parsed_datetime + + +def parse_coindb_write_batch_line(line): + match = re.match(r'^([\d\-:TZ]+) \[coindb] Writing (partial|final) batch of ([\d.]+) MiB', line) + if not match: + return None + iso_str, is_partial_str, size_mb_str = match.groups() + parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ") + return parsed_datetime, is_partial_str, float(size_mb_str) + + +def parse_coindb_commit_line(line): + match = re.match(r'^([\d\-:TZ]+) \[coindb] Committed (\d+) changed transaction outputs', line) + if not match: + return None + iso_str, txout_count_str = match.groups() + parsed_datetime = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ") + return parsed_datetime, int(txout_count_str) + def parse_log_file(log_file): with open(log_file, 'r', encoding='utf-8') as f: - data = [result for line in f if (result := parse_line(line))] - if not data: - print("No UpdateTip entries found.") - sys.exit(0) - assert all(data[i][0] <= data[i + 1][0] for i in range(len(data) - 1)), "Entries are not sorted by time" - return data + update_tip_data = [] + leveldb_compact_data = [] + leveldb_gen_table_data = [] + bench_blockindex_data = [] + validation_txadd_data = [] + coindb_write_batch_data = [] + coindb_commit_data = [] + + for line in f: + if result := parse_updatetip_line(line): + update_tip_data.append(result) + elif result := parse_leveldb_compact_line(line): + leveldb_compact_data.append(result) + elif result := parse_leveldb_generated_table_line(line): + leveldb_gen_table_data.append(result) + elif result := parse_bench_blockindex_line(line): + bench_blockindex_data.append(result) + elif result := parse_validation_txadd_line(line): + validation_txadd_data.append(result) + elif result := parse_coindb_write_batch_line(line): + 
coindb_write_batch_data.append(result) + elif result := parse_coindb_commit_line(line): + coindb_commit_data.append(result) + + if not update_tip_data: + print("No UpdateTip entries found.") + sys.exit(0) + + assert all(update_tip_data[i][0] <= update_tip_data[i + 1][0] for i in + range(len(update_tip_data) - 1)), "UpdateTip entries are not sorted by time" + + return update_tip_data, leveldb_compact_data, leveldb_gen_table_data, bench_blockindex_data, validation_txadd_data, coindb_write_batch_data, coindb_commit_data def generate_plot(x, y, x_label, y_label, title, output_file): @@ -54,7 +133,9 @@ def generate_plot(x, y, x_label, y_label, title, output_file): png_dir = sys.argv[2] os.makedirs(png_dir, exist_ok=True) - times, heights, tx_counts, cache_size, cache_count = zip(*parse_log_file(log_file)) + update_tip_data, leveldb_compact_data, leveldb_gen_table_data, bench_blockindex_data, validation_txadd_data, coindb_write_batch_data, coindb_commit_data = parse_log_file( + log_file) + times, heights, tx_counts, cache_size, cache_count = zip(*update_tip_data) float_minutes = [(t - times[0]).total_seconds() / 60 for t in times] generate_plot(float_minutes, heights, "Elapsed minutes", "Block Height", "Block Height vs Time", os.path.join(png_dir, "height_vs_time.png")) @@ -63,4 +144,38 @@ def generate_plot(x, y, x_label, y_label, title, output_file): generate_plot(heights, tx_counts, "Block Height", "Transaction Count", "Transactions vs Block Height", os.path.join(png_dir, "tx_vs_height.png")) generate_plot(times, cache_count, "Block Height", "Coins Cache Size", "Coins Cache Size vs Time", os.path.join(png_dir, "coins_cache_vs_time.png")) - print("Plots saved!") + # LevelDB Compaction and Generated Tables + if leveldb_compact_data: + leveldb_compact_times = [(t - times[0]).total_seconds() / 60 for t in leveldb_compact_data] + leveldb_compact_y = [1 for _ in leveldb_compact_times] # dummy y axis to mark compactions + generate_plot(leveldb_compact_times, 
leveldb_compact_y, "Elapsed minutes", "LevelDB Compaction", "LevelDB Compaction Events vs Time", os.path.join(png_dir, "leveldb_compact_vs_time.png")) + if leveldb_gen_table_data: + leveldb_gen_table_times, leveldb_gen_table_keys, leveldb_gen_table_bytes = zip(*leveldb_gen_table_data) + leveldb_gen_table_float_minutes = [(t - times[0]).total_seconds() / 60 for t in leveldb_gen_table_times] + generate_plot(leveldb_gen_table_float_minutes, leveldb_gen_table_keys, "Elapsed minutes", "Number of keys", "LevelDB Keys Generated vs Time", os.path.join(png_dir, "leveldb_gen_keys_vs_time.png")) + generate_plot(leveldb_gen_table_float_minutes, leveldb_gen_table_bytes, "Elapsed minutes", "Number of bytes", "LevelDB Bytes Generated vs Time", os.path.join(png_dir, "leveldb_gen_bytes_vs_time.png")) + + # Bench block index load time + if bench_blockindex_data: + bench_blockindex_times, bench_blockindex_times_ms = zip(*bench_blockindex_data) + bench_blockindex_float_minutes = [(t - times[0]).total_seconds() / 60 for t in bench_blockindex_times] + generate_plot(bench_blockindex_float_minutes, bench_blockindex_times_ms, "Elapsed minutes", "time(ms)", "Block Index Load Time vs Time", os.path.join(png_dir, "bench_blockindex_vs_time.png")) + + # validation mempool add transaction lines + if validation_txadd_data: + validation_txadd_times = [(t - times[0]).total_seconds() / 60 for t in validation_txadd_data] + validation_txadd_y = [1 for _ in validation_txadd_times] # dummy y axis to mark transaction additions + generate_plot(validation_txadd_times, validation_txadd_y, "Elapsed minutes", "Transaction Additions", "Transaction Additions to Mempool vs Time", os.path.join(png_dir, "validation_txadd_vs_time.png")) + + # coindb write batch lines + if coindb_write_batch_data: + coindb_write_batch_times, is_partial_strs, sizes_mb = zip(*coindb_write_batch_data) + coindb_write_batch_float_minutes = [(t - times[0]).total_seconds() / 60 for t in coindb_write_batch_times] + 
generate_plot(coindb_write_batch_float_minutes, sizes_mb, "Elapsed minutes", "Batch Size MiB", "Coin Database Partial/Final Write Batch Size vs Time", os.path.join(png_dir, "coindb_write_batch_size_vs_time.png")) + if coindb_commit_data: + coindb_commit_times, txout_counts = zip(*coindb_commit_data) + coindb_commit_float_minutes = [(t - times[0]).total_seconds() / 60 for t in coindb_commit_times] + generate_plot(coindb_commit_float_minutes, txout_counts, "Elapsed minutes", "Transaction Output Count", "Coin Database Transaction Output Committed vs Time", os.path.join(png_dir, "coindb_commit_txout_vs_time.png")) + + + print("Plots saved!") \ No newline at end of file diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 1cc7f8c68f09..5f1279f8da8a 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -140,7 +140,7 @@ run_benchmark() { --export-json "${results_file}" \ --command-name "base (${base_commit})" \ --command-name "head (${head_commit})" \ - "taskset -c 1 flamegraph -F 99 -- taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0" \ + "taskset -c 1 flamegraph -F 99 -- taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0 -debug=coindb -debug=leveldb -debug=bench -debug=validation" \ -L commit "base,head" } From 06dcefb1017e251c22b4f2f1312c550644730578 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Tue, 7 Jan 2025 14:38:57 +0100 Subject: [PATCH 35/44] Publish all pngs --- .github/workflows/publish-results.yml | 32 +++++++++++++-------------- 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index 
888a33427cc7..c6330149df1f 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -111,7 +111,7 @@ jobs: // Calculate speedup if we have both measurements if (baseMean && headMean) { - const speedup = baseMean > 0 ? ((baseMean - headMean) / baseMean * 100).toFixed(1) : 'N/A'; + const speedup = baseMean > 0 ? ((baseMean - headMean) / baseMean * 100).toFixed(1) : 'N/A'; combinedResults.speedups[network] = speedup; } @@ -125,11 +125,11 @@ jobs: } // Move plots - if (fs.existsSync(plotsDir)) { + if (fs.existsSync(`${network}-plots`)) { const targetPlotsDir = `${resultDir}/${network}-plots`; fs.mkdirSync(targetPlotsDir, { recursive: true }); - fs.readdirSync(plotsDir).forEach(plot => { - const sourcePlot = `${plotsDir}/${plot}`; + fs.readdirSync(`${network}-plots`).forEach(plot => { + const sourcePlot = `${network}-plots/${plot}`; const targetPlot = `${targetPlotsDir}/${plot}`; fs.copyFileSync(sourcePlot, targetPlot); }); @@ -166,7 +166,15 @@ jobs: .map(result => { const commitShortId = result.parameters.commit.slice(0, 8); const flameGraphFile = `${network}-${result.parameters.commit}-flamegraph.svg`; - const flameGraphPath = `${resultDir}/${flameGraphFile}`; + const flameGraphPath = `${resultDir}/${network}-${result.parameters.commit}-flamegraph.svg`; + + // Query PNG files dynamically + const plotDir = `${resultDir}/${network}-plots`; + const plots = fs.existsSync(plotDir) + ? fs.readdirSync(plotDir) + .map(plot => `${plot}`) + .join('') + : ''; return ` @@ -193,20 +201,10 @@ jobs:
- ${fs.existsSync(flameGraphPath) ? ` ` : ''} - ${fs.existsSync("${network}-plots/cache_vs_height.png") ? ` -

Additional Plots

-
- Height vs Time - Cache vs Height - Cache vs Time - Tx vs Height - Coins cache vs Time -
- ` : ''} + ${plots} `; }).join('')}
@@ -291,5 +289,5 @@ jobs: run: | gh pr comment ${{ needs.build.outputs.pr-number }} \ --repo ${{ github.repository }} \ - --body "📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${{ needs.build.outputs.pr-number }}/${{ github.event.workflow_run.id }}/index.html after the github pages \"build and deployment\" action has completed. + --body "📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.repository.name }}/results/pr-${{ needs.build.outputs.pr-number }}/${{ github.event.workflow_run.id }}/index.html after the github pages \"build and deployment\" action has completed. 🚀 Speedups: ${{ needs.build.outputs.speedups }}" From f4963af02ad76a87e98ce25da0c9da23fd38931a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Tue, 7 Jan 2025 16:23:39 +0100 Subject: [PATCH 36/44] Rename `master` back to `main` --- .github/workflows/publish-results.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index c6330149df1f..813e5f90d4db 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -57,7 +57,7 @@ jobs: const fs = require('fs'); const path = require('path'); const networks = process.env.NETWORKS.split(','); - let prNumber = 'master'; + let prNumber = 'main'; let runId; // First, extract metadata and get PR number @@ -283,7 +283,7 @@ jobs: actions: read steps: - name: Comment on PR - if: ${{ needs.build.outputs.pr-number != 'master' }} + if: ${{ needs.build.outputs.pr-number != 'main' }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | From 256c854254d6a375cbbc45b7201d768a5f7e8aba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Wed, 8 Jan 2025 09:26:15 +0100 Subject: [PATCH 
37/44] Fix posted URL --- .github/workflows/publish-results.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index 813e5f90d4db..1daf2d0fdaae 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -289,5 +289,5 @@ jobs: run: | gh pr comment ${{ needs.build.outputs.pr-number }} \ --repo ${{ github.repository }} \ - --body "📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.repository.name }}/results/pr-${{ needs.build.outputs.pr-number }}/${{ github.event.workflow_run.id }}/index.html after the github pages \"build and deployment\" action has completed. + --body "📊 Benchmark results for this run (${{ github.event.workflow_run.id }}) will be available at: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${{ needs.build.outputs.pr-number }}/${{ github.event.workflow_run.id }}/index.html after the github pages \"build and deployment\" action has completed. 
🚀 Speedups: ${{ needs.build.outputs.speedups }}" From 051a06c02be9653bcb305db3997b22bf190ddc3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Wed, 8 Jan 2025 09:29:07 +0100 Subject: [PATCH 38/44] Restore signet & main block counts --- justfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/justfile b/justfile index f64cd0de2629..0341a36fc271 100644 --- a/justfile +++ b/justfile @@ -33,17 +33,17 @@ build-assumeutxo-binaries base_commit head_commit: # Run signet assumeutxo CI workflow [group('ci')] run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 161000 "148.251.128.115:55555" {{ dbcache }} {{ binaries_dir }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 200000 "148.251.128.115:55555" {{ dbcache }} {{ binaries_dir }} # Run mainnet assumeutxo CI workflow for default cache [group('ci')] run-assumeutxo-mainnet-default-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 841000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 855000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} # Run mainnet assumeutxo CI workflow for large cache [group('ci')] run-assumeutxo-mainnet-large-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file 
}} {{ png_dir }} main 841000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} main 855000 "148.251.128.115:33333" {{ dbcache }} {{ binaries_dir }} # Run a signet benchmark locally [group('local')] From 5622ea62525f9e986fefe43d7c330b2cdecf6ae7 Mon Sep 17 00:00:00 2001 From: willcl-ark Date: Wed, 8 Jan 2025 10:35:31 +0000 Subject: [PATCH 39/44] fix flame viewport height --- .github/workflows/publish-results.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml index 1daf2d0fdaae..bb6bd2f3bae4 100644 --- a/.github/workflows/publish-results.yml +++ b/.github/workflows/publish-results.yml @@ -202,7 +202,7 @@ jobs: ${fs.existsSync(flameGraphPath) ? ` - + ` : ''} ${plots} `; From 5800b6672bc06365e2fa4a1f8eca845acef5b5e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Wed, 8 Jan 2025 12:27:40 +0100 Subject: [PATCH 40/44] Compile with debug symbols --- justfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/justfile b/justfile index 0341a36fc271..b7bf61414db2 100644 --- a/justfile +++ b/justfile @@ -26,7 +26,7 @@ build-assumeutxo-binaries base_commit head_commit: -DCMAKE_BUILD_TYPE=RelWithDebInfo \ -DCMAKE_C_COMPILER_LAUNCHER=ccache \ -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer" + -DCMAKE_CXX_FLAGS="-fno-omit-frame-pointer -g" taskset -c 0-15 cmake --build "build-$name" -j {{ num_cpus() }} done From 833ef1af7a35e228b1dcf3c631c94802c168596c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Wed, 8 Jan 2025 12:30:11 +0100 Subject: [PATCH 41/44] Reduce flame font size --- bench-ci/run-assumeutxo-bench.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 
5f1279f8da8a..6e331a45390b 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -140,7 +140,7 @@ run_benchmark() { --export-json "${results_file}" \ --command-name "base (${base_commit})" \ --command-name "head (${head_commit})" \ - "taskset -c 1 flamegraph -F 99 -- taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0 -debug=coindb -debug=leveldb -debug=bench -debug=validation" \ + "taskset -c 1 flamegraph -F 99 --fontsize 10 -- taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0 -debug=coindb -debug=leveldb -debug=bench -debug=validation" \ -L commit "base,head" } From c9193b792d65ecb2f7cf3e1cba077f5b02fbf425 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Wed, 8 Jan 2025 12:35:38 +0100 Subject: [PATCH 42/44] Increase signet to 220k --- justfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/justfile b/justfile index b7bf61414db2..72e3f6040072 100644 --- a/justfile +++ b/justfile @@ -33,7 +33,7 @@ build-assumeutxo-binaries base_commit head_commit: # Run signet assumeutxo CI workflow [group('ci')] run-assumeutxo-signet-ci base_commit head_commit TMP_DATADIR UTXO_PATH results_file dbcache png_dir binaries_dir: - ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 200000 "148.251.128.115:55555" {{ dbcache }} {{ binaries_dir }} + ./bench-ci/run-assumeutxo-bench.sh {{ base_commit }} {{ head_commit }} {{ TMP_DATADIR }} {{ UTXO_PATH }} {{ results_file }} {{ png_dir }} signet 220000 "148.251.128.115:55555" {{ dbcache }} {{ binaries_dir }} # Run mainnet assumeutxo CI workflow for default cache [group('ci')] From 
59218d7902d69ef4179770a0a9d9dbb61e07678c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Wed, 8 Jan 2025 12:45:36 +0100 Subject: [PATCH 43/44] Revert: Reduce flame font size --- bench-ci/run-assumeutxo-bench.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bench-ci/run-assumeutxo-bench.sh b/bench-ci/run-assumeutxo-bench.sh index 6e331a45390b..5f1279f8da8a 100755 --- a/bench-ci/run-assumeutxo-bench.sh +++ b/bench-ci/run-assumeutxo-bench.sh @@ -140,7 +140,7 @@ run_benchmark() { --export-json "${results_file}" \ --command-name "base (${base_commit})" \ --command-name "head (${head_commit})" \ - "taskset -c 1 flamegraph -F 99 --fontsize 10 -- taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0 -debug=coindb -debug=leveldb -debug=bench -debug=validation" \ + "taskset -c 1 flamegraph -F 99 -- taskset -c 2-15 ${BINARIES_DIR}/bitcoind-{commit} -datadir=${TMP_DATADIR} -connect=${connect_address} -daemon=0 -chain=${chain} -stopatheight=${stop_at_height} -dbcache=${dbcache} -printtoconsole=0 -debug=coindb -debug=leveldb -debug=bench -debug=validation" \ -L commit "base,head" } From 128abd9a28cfdf19af07a894a34916435f0596f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?L=C5=91rinc?= Date: Sun, 5 Jan 2025 23:53:30 +0100 Subject: [PATCH 44/44] m_coinstip_cache_size_bytes --- src/coins.cpp | 16 +++++++++++++--- src/coins.h | 3 ++- src/validation.cpp | 10 +++++----- 3 files changed, 20 insertions(+), 9 deletions(-) diff --git a/src/coins.cpp b/src/coins.cpp index 75d11b4f267b..0ba3fdc9f40c 100644 --- a/src/coins.cpp +++ b/src/coins.cpp @@ -34,8 +34,10 @@ bool CCoinsViewBacked::BatchWrite(CoinsViewCacheCursor& cursor, const uint256 &h std::unique_ptr CCoinsViewBacked::Cursor() const { return base->Cursor(); } size_t CCoinsViewBacked::EstimateSize() const { return base->EstimateSize(); } 
-CCoinsViewCache::CCoinsViewCache(CCoinsView* baseIn, bool deterministic) : - CCoinsViewBacked(baseIn), m_deterministic(deterministic), +CCoinsViewCache::CCoinsViewCache(CCoinsView* baseIn, size_t coinstip_cache_size_bytes, bool deterministic) : + CCoinsViewBacked(baseIn), + m_coinstip_cache_size_bytes(coinstip_cache_size_bytes), + m_deterministic(deterministic), cacheCoins(0, SaltedOutpointHasher(/*deterministic=*/deterministic), CCoinsMap::key_equal{}, &m_cache_coins_memory_resource) { m_sentinel.second.SelfRef(m_sentinel); @@ -315,7 +317,15 @@ void CCoinsViewCache::ReallocateCache() cacheCoins.~CCoinsMap(); m_cache_coins_memory_resource.~CCoinsMapMemoryResource(); ::new (&m_cache_coins_memory_resource) CCoinsMapMemoryResource{}; - ::new (&cacheCoins) CCoinsMap{0, SaltedOutpointHasher{/*deterministic=*/m_deterministic}, CCoinsMap::key_equal{}, &m_cache_coins_memory_resource}; + auto x = sizeof(CoinsCachePair) + 20; // TODO (cache size bytes / cache count) = ~133 + size_t max_cache_size = m_coinstip_cache_size_bytes / x; + LogInfo("CCoinsViewCache::ReallocateCache: creating new CCoinsMap with %d elements", max_cache_size); + ::new (&cacheCoins) CCoinsMap{ + max_cache_size, + SaltedOutpointHasher{/*deterministic=*/m_deterministic}, + CCoinsMap::key_equal{}, + &m_cache_coins_memory_resource + }; } void CCoinsViewCache::SanityCheck() const diff --git a/src/coins.h b/src/coins.h index a2449e1b8154..de4b727b38f4 100644 --- a/src/coins.h +++ b/src/coins.h @@ -356,6 +356,7 @@ class CCoinsViewBacked : public CCoinsView class CCoinsViewCache : public CCoinsViewBacked { private: + const size_t m_coinstip_cache_size_bytes; const bool m_deterministic; protected: @@ -373,7 +374,7 @@ class CCoinsViewCache : public CCoinsViewBacked mutable size_t cachedCoinsUsage{0}; public: - CCoinsViewCache(CCoinsView *baseIn, bool deterministic = false); + CCoinsViewCache(CCoinsView *baseIn, size_t coinstip_cache_size_bytes = 0, bool deterministic = false); /** * By deleting the copy 
constructor, we prevent accidentally using it when one intends to create a cache on top of a base cache. diff --git a/src/validation.cpp b/src/validation.cpp index a387bcd2dbdf..6f18f8402012 100644 --- a/src/validation.cpp +++ b/src/validation.cpp @@ -3054,7 +3054,7 @@ bool Chainstate::DisconnectTip(BlockValidationState& state, DisconnectedBlockTra // Apply the block atomically to the chain state. const auto time_start{SteadyClock::now()}; { - CCoinsViewCache view(&CoinsTip()); + CCoinsViewCache view(&CoinsTip(), m_coinstip_cache_size_bytes); assert(view.GetBestBlock() == pindexDelete->GetBlockHash()); if (DisconnectBlock(block, pindexDelete, view) != DISCONNECT_OK) { LogError("DisconnectTip(): DisconnectBlock %s failed\n", pindexDelete->GetBlockHash().ToString()); @@ -3175,7 +3175,7 @@ bool Chainstate::ConnectTip(BlockValidationState& state, CBlockIndex* pindexNew, LogDebug(BCLog::BENCH, " - Load block from disk: %.2fms\n", Ticks(time_2 - time_1)); { - CCoinsViewCache view(&CoinsTip()); + CCoinsViewCache view(&CoinsTip(), m_coinstip_cache_size_bytes); bool rv = ConnectBlock(blockConnecting, state, pindexNew, view); if (m_chainman.m_options.signals) { m_chainman.m_options.signals->BlockChecked(blockConnecting, state); @@ -4638,7 +4638,7 @@ bool TestBlockValidity(BlockValidationState& state, { AssertLockHeld(cs_main); assert(pindexPrev && pindexPrev == chainstate.m_chain.Tip()); - CCoinsViewCache viewNew(&chainstate.CoinsTip()); + CCoinsViewCache viewNew(&chainstate.CoinsTip(), chainstate.m_coinstip_cache_size_bytes); uint256 block_hash(block.GetHash()); CBlockIndex indexDummy(block); indexDummy.pprev = pindexPrev; @@ -4733,7 +4733,7 @@ VerifyDBResult CVerifyDB::VerifyDB( } nCheckLevel = std::max(0, std::min(4, nCheckLevel)); LogPrintf("Verifying last %i blocks at level %i\n", nCheckDepth, nCheckLevel); - CCoinsViewCache coins(&coinsview); + CCoinsViewCache coins(&coinsview, chainstate.m_coinstip_cache_size_bytes); CBlockIndex* pindex; CBlockIndex* pindexFailure = 
nullptr; int nGoodTransactions = 0; @@ -4882,7 +4882,7 @@ bool Chainstate::ReplayBlocks() LOCK(cs_main); CCoinsView& db = this->CoinsDB(); - CCoinsViewCache cache(&db); + CCoinsViewCache cache(&db, m_coinstip_cache_size_bytes); std::vector hashHeads = db.GetHeadBlocks(); if (hashHeads.empty()) return true; // We're already in a consistent state.